JSON.NET Serialize Directly from OleDbConnection

JSON.NET serialize directly from OleDbConnection

Firstly, you should stop serializing to an intermediate string and instead serialize directly to the HttpResponse.OutputStream, using the following simple methods:

public static class JsonExtensions
{
public static void SerializeToStream(object value, System.Web.HttpResponse response, JsonSerializerSettings settings = null)
{
if (response == null)
throw new ArgumentNullException("response");
SerializeToStream(value, response.OutputStream, settings);
}

public static void SerializeToStream(object value, TextWriter writer, JsonSerializerSettings settings = null)
{
if (writer == null)
throw new ArgumentNullException("writer");
var serializer = JsonSerializer.CreateDefault(settings);
serializer.Serialize(writer, value);
}

public static void SerializeToStream(object value, Stream stream, JsonSerializerSettings settings = null)
{
if (stream == null)
throw new ArgumentNullException("stream");
using (var writer = new StreamWriter(stream))
{
SerializeToStream(value, writer, settings);
}
}
}
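For example, from a Web Forms page or a generic handler you could then stream straight to the response. A minimal usage sketch (myLargeObject is a placeholder for whatever you currently serialize to a string):

Response.ContentType = "application/json";
// Hypothetical usage: writes JSON directly to Response.OutputStream, no intermediate string.
JsonExtensions.SerializeToStream(myLargeObject, Response);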

Since a large string requires a large contiguous block of memory for the underlying char array, that's where you are going to run out of memory first. See also Json.NET's Performance Tips:

To minimize memory usage and the number of objects allocated, Json.NET supports serializing and deserializing directly to a stream. Reading or writing JSON a piece at a time, instead of having the entire JSON string loaded into memory, is especially important when working with JSON documents greater than 85kb in size to avoid the JSON string ending up in the large object heap.

Next, be sure to wrap all your disposables in a using statement, as is shown below.

That might solve your problem, but if it doesn't, you can serialize an IDataReader to JSON using the following JsonConverter:

public class DataReaderConverter : JsonConverter
{
public override bool CanConvert(Type objectType)
{
return typeof(IDataReader).IsAssignableFrom(objectType);
}

public override bool CanRead { get { return false; } }

public override object ReadJson(JsonReader reader, Type objectType, object existingValue, JsonSerializer serializer)
{
throw new NotImplementedException();
}

public override void WriteJson(JsonWriter writer, object value, JsonSerializer serializer)
{
var reader = (IDataReader)value;
writer.WriteStartArray();
while (reader.Read())
{
writer.WriteStartObject();
for (int i = 0; i < reader.FieldCount; i++)
{
writer.WritePropertyName(reader.GetName(i));
if (reader.IsDBNull(i))
writer.WriteNull();
else
serializer.Serialize(writer, reader[i]);
}
writer.WriteEndObject();
}
writer.WriteEndArray();
}
}

And then serialize to stream as follows:

public static class ExcelExtensions
{
private static string GetExcelConnectionString(string path)
{
string connectionString = string.Empty;
if (path.EndsWith(".xls"))
{
connectionString = String.Format(@"Provider=Microsoft.ACE.OLEDB.12.0;
Data Source={0};
Extended Properties=""Excel 8.0;HDR=YES;IMEX=1""", path);
}
else if (path.EndsWith(".xlsx"))
{
connectionString = String.Format(@"Provider=Microsoft.ACE.OLEDB.12.0;
Data Source={0};
Extended Properties=""Excel 12.0 Xml;HDR=YES;IMEX=1""", path);
}
return connectionString;
}

public static string SerializeJsonToString(string path, string workSheetName, JsonSerializerSettings settings = null)
{
using (var writer = new StringWriter())
{
SerializeJsonToStream(path, workSheetName, writer, settings);
return writer.ToString();
}
}

public static void SerializeJsonToStream(string path, string workSheetName, Stream stream, JsonSerializerSettings settings = null)
{
using (var writer = new StreamWriter(stream))
SerializeJsonToStream(path, workSheetName, writer, settings);
}

public static void SerializeJsonToStream(string path, string workSheetName, TextWriter writer, JsonSerializerSettings settings = null)
{
settings = settings ?? new JsonSerializerSettings();
var converter = new DataReaderConverter();
settings.Converters.Add(converter);
try
{
string connectionString = GetExcelConnectionString(path);
DbProviderFactory factory = DbProviderFactories.GetFactory("System.Data.OleDb");

using (OleDbConnection conn = new OleDbConnection(connectionString))
{
conn.Open();
using (DbCommand selectCommand = factory.CreateCommand())
{
selectCommand.CommandText = String.Format("SELECT * FROM [{0}]", workSheetName);
selectCommand.Connection = conn;

using (var reader = selectCommand.ExecuteReader())
{
JsonExtensions.SerializeToStream(reader, writer, settings);
}
}
}
}
finally
{
settings.Converters.Remove(converter);
}
}
}

Note - lightly tested. Be sure to unit-test this against your existing method before putting it into production! For the converter code I used JSON Serialization of a DataReader as an inspiration.
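For example, to stream a worksheet straight into an HTTP response (a minimal usage sketch; the file path and sheet name are placeholders, and OLE DB expects the worksheet name with a trailing $):

Response.ContentType = "application/json";
// Hypothetical call: writes the worksheet as a JSON array of row objects.
ExcelExtensions.SerializeJsonToStream(@"C:\temp\file.xlsx", "Sheet1$", Response.OutputStream);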

Update

My converter emits JSON in the same structure as the DataTableConverter of Json.NET. Thus you're going to be able to deserialize to a DataTable automatically using Json.NET. If you prefer a more compact format, you could define your own, for instance:

{
"columns": [
"Name 1",
"Name 2"
],
"rows": [
[
"value 11",
"value 12"
],
[
"value 21",
"value 22"
]
]
}

And then create the following converter:

public class DataReaderArrayConverter : JsonConverter
{
public override bool CanConvert(Type objectType)
{
return typeof(IDataReader).IsAssignableFrom(objectType);
}

public override bool CanRead { get { return false; } }

public override object ReadJson(JsonReader reader, Type objectType, object existingValue, JsonSerializer serializer)
{
throw new NotImplementedException();
}

static string[] GetFieldNames(IDataReader reader)
{
var fieldNames = new string[reader.FieldCount];
for (int i = 0; i < reader.FieldCount; i++)
fieldNames[i] = reader.GetName(i);
return fieldNames;
}

static void ValidateFieldNames(IDataReader reader, string[] fieldNames)
{
if (reader.FieldCount != fieldNames.Length)
throw new InvalidOperationException("Unequal record lengths");
for (int i = 0; i < reader.FieldCount; i++)
if (fieldNames[i] != reader.GetName(i))
throw new InvalidOperationException(string.Format("Field names at index {0} differ: \"{1}\" vs \"{2}\"", i, fieldNames[i], reader.GetName(i)));
}

const string columnsName = "columns";
const string rowsName = "rows";

public override void WriteJson(JsonWriter writer, object value, JsonSerializer serializer)
{
var reader = (IDataReader)value;
writer.WriteStartObject();
string[] fieldNames = null;
while (reader.Read())
{
if (fieldNames == null)
{
writer.WritePropertyName(columnsName);
fieldNames = GetFieldNames(reader);
serializer.Serialize(writer, fieldNames);
writer.WritePropertyName(rowsName);
writer.WriteStartArray();
}
else
{
ValidateFieldNames(reader, fieldNames);
}

writer.WriteStartArray();
for (int i = 0; i < reader.FieldCount; i++)
{
if (reader.IsDBNull(i))
writer.WriteNull();
else
serializer.Serialize(writer, reader[i]);
}
writer.WriteEndArray();
}
if (fieldNames != null)
{
writer.WriteEndArray();
}
writer.WriteEndObject();
}
}

Of course, you'll need to create your own deserialization converter on the client side.
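As a rough sketch of what that client-side converter might do (an assumption: string-typed columns are good enough for your data; adjust the column types as needed), you could rebuild a DataTable from the compact format with LINQ to JSON:

// Sketch only: rebuilds a DataTable from the {"columns":[...],"rows":[[...]]} layout above.
// Requires Newtonsoft.Json.Linq and System.Data.
public static class CompactTableReader
{
    public static DataTable ToDataTable(string json)
    {
        var obj = JObject.Parse(json);
        var table = new DataTable();

        // One string column per entry in "columns".
        foreach (var name in obj["columns"])
            table.Columns.Add((string)name, typeof(string));

        // Each entry in "rows" is an array of cell values in column order.
        foreach (var row in obj["rows"])
        {
            var dataRow = table.NewRow();
            int i = 0;
            foreach (var cell in row)
                dataRow[i++] = cell.Type == JTokenType.Null ? (object)DBNull.Value : (object)(string)cell;
            table.Rows.Add(dataRow);
        }
        return table;
    }
}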

Alternatively, you could consider compressing your response. I've never tried it, but see HttpWebRequest and GZip Http Responses and ASP.NET GZip Encoding Caveats.
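If you do go the compression route, one approach in classic ASP.NET (System.Web) is to wrap the response filter in a GZipStream; a minimal, untested sketch that only compresses when the client advertises gzip support:

var acceptEncoding = Request.Headers["Accept-Encoding"] ?? string.Empty;
if (acceptEncoding.Contains("gzip"))
{
    // Everything written to the response from here on is gzip-compressed.
    Response.Filter = new System.IO.Compression.GZipStream(
        Response.Filter, System.IO.Compression.CompressionMode.Compress);
    Response.AppendHeader("Content-Encoding", "gzip");
}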

Running into OutOfMemoryException when serializing List of Objects which needs to be ported as JSON to a DataTable

It is very likely that the resulting string is too big for the available memory, or that it exceeds the 2 GB limit for a single string. You are trying to serialize a lot of objects, so the best approach is probably to split them into multiple chunks for serialization. Alternatively, you can try another serializer such as Newtonsoft's Json.NET, which is fast and probably the most widely used JSON serializer out there. Json.NET can also serialize to a stream (see Serialize to stream), which can be written directly to the Response object.
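For instance, with Json.NET you can write the collection straight to the response rather than building one huge string first (a minimal sketch, assuming classic ASP.NET and an already-populated items collection):

var serializer = JsonSerializer.CreateDefault();
Response.ContentType = "application/json";
using (var streamWriter = new StreamWriter(Response.OutputStream))
using (var jsonWriter = new JsonTextWriter(streamWriter))
{
    // Items are written out as they are serialized; the complete JSON string never exists in memory.
    serializer.Serialize(jsonWriter, items);
}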

Newtonsoft.Json.JsonSerializationException (Error getting value from 'Value' on 'System.Data.SqlTypes.SqlDouble') serializing SqlGeography

It looks as though the primitive types such as SqlDouble from System.Data.SqlTypes cannot be serialized out-of-the-box by Json.NET because they do not implement their own TypeConverter. From the docs:

Primitive Types
.NET: TypeConverter (convertible to String)
JSON: String

Thus it will be necessary to implement a custom JsonConverter to serialize these types. Json.NET has several built-in converters such as KeyValuePairConverter for built-in .NET types, so this is not unusual.

Because SqlBoolean, SqlBinary, SqlDouble and so on do not share a common base class or interface other than INullable, some duplicated-looking code is required:

public static class SqlPrimitiveConverters
{
public static JsonSerializerSettings AddSqlConverters(this JsonSerializerSettings settings)
{
foreach (var converter in converters)
settings.Converters.Add(converter);
return settings;
}

static readonly JsonConverter[] converters = new JsonConverter[]
{
new SqlBinaryConverter(),
new SqlBooleanConverter(),
new SqlByteConverter(),
new SqlDateTimeConverter(),
new SqlDecimalConverter(),
new SqlDoubleConverter(),
new SqlGuidConverter(),
new SqlInt16Converter(),
new SqlInt32Converter(),
new SqlInt64Converter(),
new SqlMoneyConverter(),
new SqlSingleConverter(),
new SqlStringConverter(),
// TODO: converters for primitives from System.Data.SqlTypes that are classes not structs:
// SqlBytes, SqlChars, SqlXml
// Maybe SqlFileStream
};
}

abstract class SqlPrimitiveConverterBase<T> : JsonConverter where T : struct, INullable, IComparable
{
protected abstract object GetValue(T sqlValue);

public override bool CanConvert(Type objectType)
{
return typeof(T) == objectType;
}

public override void WriteJson(JsonWriter writer, object value, JsonSerializer serializer)
{
T sqlValue = (T)value;
if (sqlValue.IsNull)
writer.WriteNull();
else
{
serializer.Serialize(writer, GetValue(sqlValue));
}
}
}

class SqlBinaryConverter : SqlPrimitiveConverterBase<SqlBinary>
{
protected override object GetValue(SqlBinary sqlValue) { return sqlValue.Value; }

public override object ReadJson(JsonReader reader, Type objectType, object existingValue, JsonSerializer serializer)
{
if (reader.TokenType == JsonToken.Null)
return SqlBinary.Null;
return (SqlBinary)serializer.Deserialize<byte[]>(reader);
}
}

class SqlBooleanConverter : SqlPrimitiveConverterBase<SqlBoolean>
{
protected override object GetValue(SqlBoolean sqlValue) { return sqlValue.Value; }

public override object ReadJson(JsonReader reader, Type objectType, object existingValue, JsonSerializer serializer)
{
if (reader.TokenType == JsonToken.Null)
return SqlBoolean.Null;
return (SqlBoolean)serializer.Deserialize<bool>(reader);
}
}

class SqlByteConverter : SqlPrimitiveConverterBase<SqlByte>
{
protected override object GetValue(SqlByte sqlValue) { return sqlValue.Value; }

public override object ReadJson(JsonReader reader, Type objectType, object existingValue, JsonSerializer serializer)
{
if (reader.TokenType == JsonToken.Null)
return SqlByte.Null;
return (SqlByte)serializer.Deserialize<byte>(reader);
}
}

class SqlDateTimeConverter : SqlPrimitiveConverterBase<SqlDateTime>
{
protected override object GetValue(SqlDateTime sqlValue) { return sqlValue.Value; }

public override object ReadJson(JsonReader reader, Type objectType, object existingValue, JsonSerializer serializer)
{
if (reader.TokenType == JsonToken.Null)
return SqlDateTime.Null;
return (SqlDateTime)serializer.Deserialize<DateTime>(reader);
}
}

class SqlDecimalConverter : SqlPrimitiveConverterBase<SqlDecimal>
{
protected override object GetValue(SqlDecimal sqlValue) { return sqlValue.Value; }

public override object ReadJson(JsonReader reader, Type objectType, object existingValue, JsonSerializer serializer)
{
if (reader.TokenType == JsonToken.Null)
return SqlDecimal.Null;
return (SqlDecimal)serializer.Deserialize<decimal>(reader);
}
}

class SqlDoubleConverter : SqlPrimitiveConverterBase<SqlDouble>
{
protected override object GetValue(SqlDouble sqlValue) { return sqlValue.Value; }

public override object ReadJson(JsonReader reader, Type objectType, object existingValue, JsonSerializer serializer)
{
if (reader.TokenType == JsonToken.Null)
return SqlDouble.Null;
return (SqlDouble)serializer.Deserialize<double>(reader);
}
}

class SqlGuidConverter : SqlPrimitiveConverterBase<SqlGuid>
{
protected override object GetValue(SqlGuid sqlValue) { return sqlValue.Value; }

public override object ReadJson(JsonReader reader, Type objectType, object existingValue, JsonSerializer serializer)
{
if (reader.TokenType == JsonToken.Null)
return SqlGuid.Null;
return (SqlGuid)serializer.Deserialize<Guid>(reader);
}
}

class SqlInt16Converter : SqlPrimitiveConverterBase<SqlInt16>
{
protected override object GetValue(SqlInt16 sqlValue) { return sqlValue.Value; }

public override object ReadJson(JsonReader reader, Type objectType, object existingValue, JsonSerializer serializer)
{
if (reader.TokenType == JsonToken.Null)
return SqlInt16.Null;
return (SqlInt16)serializer.Deserialize<short>(reader);
}
}

class SqlInt32Converter : SqlPrimitiveConverterBase<SqlInt32>
{
protected override object GetValue(SqlInt32 sqlValue) { return sqlValue.Value; }

public override object ReadJson(JsonReader reader, Type objectType, object existingValue, JsonSerializer serializer)
{
if (reader.TokenType == JsonToken.Null)
return SqlInt32.Null;
return (SqlInt32)serializer.Deserialize<int>(reader);
}
}

class SqlInt64Converter : SqlPrimitiveConverterBase<SqlInt64>
{
protected override object GetValue(SqlInt64 sqlValue) { return sqlValue.Value; }

public override object ReadJson(JsonReader reader, Type objectType, object existingValue, JsonSerializer serializer)
{
if (reader.TokenType == JsonToken.Null)
return SqlInt64.Null;
return (SqlInt64)serializer.Deserialize<long>(reader);
}
}

class SqlMoneyConverter : SqlPrimitiveConverterBase<SqlMoney>
{
protected override object GetValue(SqlMoney sqlValue) { return sqlValue.Value; }

public override object ReadJson(JsonReader reader, Type objectType, object existingValue, JsonSerializer serializer)
{
if (reader.TokenType == JsonToken.Null)
return SqlMoney.Null;
return (SqlMoney)serializer.Deserialize<decimal>(reader);
}
}

class SqlSingleConverter : SqlPrimitiveConverterBase<SqlSingle>
{
protected override object GetValue(SqlSingle sqlValue) { return sqlValue.Value; }

public override object ReadJson(JsonReader reader, Type objectType, object existingValue, JsonSerializer serializer)
{
if (reader.TokenType == JsonToken.Null)
return SqlSingle.Null;
return (SqlSingle)serializer.Deserialize<float>(reader);
}
}

class SqlStringConverter : SqlPrimitiveConverterBase<SqlString>
{
protected override object GetValue(SqlString sqlValue) { return sqlValue.Value; }

public override object ReadJson(JsonReader reader, Type objectType, object existingValue, JsonSerializer serializer)
{
if (reader.TokenType == JsonToken.Null)
return SqlString.Null;
return (SqlString)serializer.Deserialize<string>(reader);
}
}

Working .Net fiddle forked from your own.

If you need to deserialize the JSON thereby created, you have two additional problems. Firstly, some of the properties of SqlGeography, such as Lat and Long, are get-only; you will need a custom JsonConverter to fully deserialize this type.
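As a rough sketch of the sort of converter meant here (an assumption-laden example: it supposes the geography values are simple points that were serialized via their Lat, Long and STSrid properties; lines and polygons would need a richer representation such as WKT):

// Requires Microsoft.SqlServer.Types and Newtonsoft.Json.Linq.
class SqlGeographyConverter : JsonConverter
{
    public override bool CanConvert(Type objectType)
    {
        return objectType == typeof(SqlGeography);
    }

    // Writing is left to the default property-based serialization (with the Sql* converters above).
    public override bool CanWrite { get { return false; } }

    public override void WriteJson(JsonWriter writer, object value, JsonSerializer serializer)
    {
        throw new NotImplementedException();
    }

    public override object ReadJson(JsonReader reader, Type objectType, object existingValue, JsonSerializer serializer)
    {
        if (reader.TokenType == JsonToken.Null)
            return null;
        var obj = JObject.Load(reader);
        // SqlGeography.Point(latitude, longitude, srid) rebuilds a point geometry.
        return SqlGeography.Point((double)obj["Lat"], (double)obj["Long"], (int?)obj["STSrid"] ?? 4326);
    }
}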

Secondly, Json.NET cannot deserialize JSON with complex objects for row values to an untyped DataTable. Thus, if you need to deserialize JSON containing a complex object (such as your serialized SqlGeography), you have the following options:

  1. Create and deserialize to a typed DataTable.

  2. Use DataTableConverter directly to populate a pre-existing DataTable with pre-allocated columns, as shown here.

  3. Deserialize to a list of DTOs such as the following:

    public class TableRowDTO
    {
    [JsonConverter(typeof(SqlGeographyConverter))]
    public SqlGeography f1 { get; set; }
    public int id { get; set; }
    }

    Where SqlGeographyConverter is, as required, a custom JsonConverter for SqlGeography.

    And then do:

    var settings = new JsonSerializerSettings().AddSqlConverters();
    var list = JsonConvert.DeserializeObject<List<TableRowDTO>>(jsonString, settings);

C# Custom Json using JSON.NET from dataset or datatable

You can use LINQ + DataTableExtensions (in the System.Data namespace, assembly System.Data.DataSetExtensions.dll) to transform your table into an enumerable of anonymous types, then serialize that to JSON with Json.NET.

I notice your "userIds" property is a JSON array. Do you want all the userIds for a given alert to be combined? If so, you can use ToLookup to combine them:

var root = dataTable.AsEnumerable()
    .ToLookup(r => r["alert"].ToString(), r => r["userIds"].ToString())
    .Select(g => new { message = new { alert = g.Key }, target = new { userIds = g } });

var json = JsonConvert.SerializeObject(root);

If not, do:

var root = dataTable.AsEnumerable()
    .Select(r => new { message = new { alert = r["alert"].ToString() }, target = new { userIds = new[] { r["userIds"].ToString() } } });

var json = JsonConvert.SerializeObject(root);

For the following table:

        var dataTable = new DataTable("A");
dataTable.Columns.Add("alert");
dataTable.Columns.Add("userIds");
dataTable.Rows.Add("Address Updated", "BKAC7759");
dataTable.Rows.Add("Payment Made", "BKAC7759");
dataTable.Rows.Add("Address Updated", "MAND1884");
dataTable.Rows.Add("Payment Made", "MAND1884");

The first produces the following JSON:

[
{"message":{"alert":"Address Updated"},"target":{"userIds":["BKAC7759","MAND1884"]}},
{"message":{"alert":"Payment Made"},"target":{"userIds":["BKAC7759","MAND1884"]}}
]

And the second produces the following:

[
{"message":{"alert":"Address Updated"},"target":{"userIds":["BKAC7759"]}},
{"message":{"alert":"Payment Made"},"target":{"userIds":["BKAC7759"]}},
{"message":{"alert":"Address Updated"},"target":{"userIds":["MAND1884"]}},
{"message":{"alert":"Payment Made"},"target":{"userIds":["MAND1884"]}}
]

Converting excel into JSON using C# with first row in excel serving as the keys

I think you will find the column names under:

rdr.GetName(0); //First column name
rdr.GetName(1); //Second column name
rdr.GetName(2); //Third column name

Then you need to put the values into some sort of dynamic object; you cannot generate an anonymous type at runtime.

var list = rdr.Cast<DbDataRecord>().Select(x =>
{
    // ExpandoObject only exposes Add through its IDictionary<string, object> interface.
    IDictionary<string, object> itm = new ExpandoObject();
    itm.Add(rdr.GetName(0), x[0]);
    itm.Add(rdr.GetName(1), x[1]);
    itm.Add(rdr.GetName(2), x[2]);
    return itm;
}).ToList();

I think you will be able to take it from there.

Adding a full example:

var pathToExcel = @"C:\temp\file.xlsx";
var sheetName = "sheetOne";

//This connection string works if you have Office 2007+ installed and your
//data is saved in a .xlsx file
var connectionString = String.Format(@"
Provider=Microsoft.ACE.OLEDB.12.0;
Data Source={0};
Extended Properties=""Excel 12.0 Xml;HDR=YES""
", pathToExcel);

//Creating and opening a data connection to the Excel sheet
using (var conn = new OleDbConnection(connectionString))
{
conn.Open();

var cmd = conn.CreateCommand();
cmd.CommandText = String.Format(
@"SELECT * FROM [{0}$]",
sheetName
);

using (var rdr = cmd.ExecuteReader())
{
//LINQ query - when executed will create anonymous objects for each row
var query =
(from DbDataRecord row in rdr
select row).Select(x =>
{

//dynamic item = new ExpandoObject();
Dictionary<string,object> item = new Dictionary<string, object>();
item.Add(rdr.GetName(0), x[0]);
item.Add(rdr.GetName(1), x[1]);
item.Add(rdr.GetName(2), x[2]);
return item;

});

//Generates JSON from the LINQ query
var json = JsonConvert.SerializeObject(query);
return json;
}
}
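Note that JsonConvert.SerializeObject(query) above still builds the whole JSON string in memory. If the sheet is very large, you could instead stream row by row with a JsonTextWriter (a rough sketch, reusing the same connection and command setup; outputWriter stands for any TextWriter you own, such as a StreamWriter over the response stream):

// Requires Newtonsoft.Json.
using (var rdr = cmd.ExecuteReader())
using (var jsonWriter = new JsonTextWriter(outputWriter))
{
    jsonWriter.WriteStartArray();
    while (rdr.Read())
    {
        // One JSON object per row, keyed by the column names from the header row.
        jsonWriter.WriteStartObject();
        for (int i = 0; i < rdr.FieldCount; i++)
        {
            jsonWriter.WritePropertyName(rdr.GetName(i));
            jsonWriter.WriteValue(rdr.IsDBNull(i) ? null : rdr.GetValue(i));
        }
        jsonWriter.WriteEndObject();
    }
    jsonWriter.WriteEndArray();
}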

.NET Core - convert reader to JSON

Solved with the following:

var con = _context.Database.GetDbConnection();
var cmd = con.CreateCommand();
cmd.CommandType = CommandType.StoredProcedure;
cmd.CommandText = "procedures.sp_Users";
cmd.Parameters.Add(new SqlParameter("@Command", SqlDbType.VarChar) { Value = "Login" } );
cmd.Parameters.Add(new SqlParameter("@user", SqlDbType.VarChar) { Value = "admin" } );
cmd.Parameters.Add(new SqlParameter("@pass", SqlDbType.VarChar) { Value = "admin" } );

var retObject = new List<dynamic>();
con.Open();
using (var dataReader = cmd.ExecuteReader())
{
while (dataReader.Read())
{
var dataRow = new ExpandoObject() as IDictionary<string, object>;
for (var iField = 0; iField < dataReader.FieldCount; iField++)
dataRow.Add(
dataReader.GetName(iField),
dataReader.IsDBNull(iField) ? null : dataReader[iField] // use null instead of {}
);

retObject.Add((ExpandoObject)dataRow);
}
}
return retObject;

As suggested in the following link
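If you need the JSON text itself, rather than returning the list and letting the framework serialize the action result, a minimal follow-up with Json.NET might look like this (hypothetical; retObject is the list built above):

// Explicitly serialize the list of ExpandoObjects to a JSON string.
var json = JsonConvert.SerializeObject(retObject, Formatting.Indented);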

Dealing with large JSON data returned by Web API

Your problem is that you are running an Oracle query that is returning a very large number of results, and then loading that entire result set into memory before serializing it out to the HttpResponseMessage.

To reduce your memory usage, you should find and eliminate all cases where the entire set of results from the query is loaded into a temporary intermediate representation (e.g. a DataTable or JSON string), and instead stream the data out using a DataReader, which avoids pulling everything into memory at once, as explained in this answer.

First, from your traceback, it appears you have Enable Browser Link checked. Since this apparently tries to cache the entire response in a MemoryStream, you will want to disable it as explained in FilePathResult thrown an OutOfMemoryException with large file.

Next, you can stream the contents of an IDataReader directly to JSON using Json.NET with following class and converter:

[JsonConverter(typeof(OracleDataTableJsonResponseConverter))]
public sealed class OracleDataTableJsonResponse
{
public string ConnectionString { get; private set; }
public string QueryString { get; private set; }
public OracleParameter[] Parameters { get; private set; }

public OracleDataTableJsonResponse(string connStr, string strQuery, OracleParameter[] prms)
{
this.ConnectionString = connStr;
this.QueryString = strQuery;
this.Parameters = prms;
}
}

class OracleDataTableJsonResponseConverter : JsonConverter
{
public override bool CanConvert(Type objectType)
{
return objectType == typeof(OracleDataTableJsonResponse);
}

public override object ReadJson(JsonReader reader, Type objectType, object existingValue, JsonSerializer serializer)
{
throw new NotImplementedException("OracleDataTableJsonResponse is only for writing JSON. To read, deserialize into a DataTable");
}

public override void WriteJson(JsonWriter writer, object value, JsonSerializer serializer)
{
var response = (OracleDataTableJsonResponse)value;

using (var dbconn = new OracleConnection(response.ConnectionString))
{
dbconn.Open();
using (var selectCommand = new OracleCommand(response.QueryString, dbconn))
{
if (response.Parameters != null)
selectCommand.Parameters.AddRange(response.Parameters);
using (var reader = selectCommand.ExecuteReader())
{
writer.WriteDataTable(reader, serializer);
}
}
}
}
}

public static class JsonExtensions
{
public static void WriteDataTable(this JsonWriter writer, IDataReader reader, JsonSerializer serializer)
{
if (writer == null || reader == null || serializer == null)
throw new ArgumentNullException();
writer.WriteStartArray();
while (reader.Read())
{
writer.WriteStartObject();
for (int i = 0; i < reader.FieldCount; i++)
{
writer.WritePropertyName(reader.GetName(i));
serializer.Serialize(writer, reader[i]);
}
writer.WriteEndObject();
}
writer.WriteEndArray();
}
}

Then modify your code to look something like:

    public HttpResponseMessage Getdetails([FromUri] string[] id)
{
var prms = new List<OracleParameter>();
var connStr = ConfigurationManager.ConnectionStrings["PDataConnection"].ConnectionString;
var inconditions = id.Distinct().ToArray();
var strQuery = @"SELECT
STCD_PRIO_CATEGORY_DESCR.DESCR AS CATEGORY,
STCD_PRIO_CATEGORY_DESCR.SESSION_NUM AS SESSION_NUMBER,
Trunc(STCD_PRIO_CATEGORY_DESCR.START_DATE) AS SESSION_START_DATE,
STCD_PRIO_CATEGORY_DESCR.START_DATE AS SESSION_START_TIME ,
Trunc(STCD_PRIO_CATEGORY_DESCR.END_DATE) AS SESSION_END_DATE
FROM
STCD_PRIO_CATEGORY_DESCR
WHERE
STCD_PRIO_CATEGORY_DESCR.STD_REF IN(";
var sb = new StringBuilder(strQuery);
for (int x = 0; x < inconditions.Length; x++)
{
sb.Append(":p" + x + ",");
var p = new OracleParameter(":p" + x, OracleDbType.NVarchar2);
p.Value = inconditions[x];
prms.Add(p);
}
if (sb.Length > 0)// Should this be inconditions.Length > 0 ?
sb.Length--;
strQuery = sb.Append(")").ToString();

var returnObject = new { data = new OracleDataTableJsonResponse(connStr, strQuery, prms.ToArray()) };
var response = Request.CreateResponse(HttpStatusCode.OK, returnObject, MediaTypeHeaderValue.Parse("application/json"));
ContentDispositionHeaderValue contentDisposition = null;

