Directly Convert CSV File to JSON File Using the Jackson Library

directly convert CSV file to JSON file using the Jackson library

I think you should use MappingIterator to solve your problem. See the example below:

import java.io.File;
import java.io.IOException;
import java.util.List;
import java.util.Map;

import com.fasterxml.jackson.databind.MappingIterator;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.dataformat.csv.CsvMapper;
import com.fasterxml.jackson.dataformat.csv.CsvSchema;

public class JacksonProgram {

    public static void main(String[] args) throws Exception {
        File input = new File("/x/data.csv");
        File output = new File("/x/data.json");

        List<Map<?, ?>> data = readObjectsFromCsv(input);
        writeAsJson(data, output);
    }

    public static List<Map<?, ?>> readObjectsFromCsv(File file) throws IOException {
        CsvSchema bootstrap = CsvSchema.emptySchema().withHeader();
        CsvMapper csvMapper = new CsvMapper();
        try (MappingIterator<Map<?, ?>> mappingIterator =
                     csvMapper.readerFor(Map.class).with(bootstrap).readValues(file)) {
            return mappingIterator.readAll();
        }
    }

    public static void writeAsJson(List<Map<?, ?>> data, File file) throws IOException {
        ObjectMapper mapper = new ObjectMapper();
        mapper.writeValue(file, data);
    }
}

See the jackson-dataformat-csv project page for more information and examples.
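
As a small optional tweak (not part of the original answer), if you want the JSON file indented rather than written on a single line, you can ask ObjectMapper for a pretty-printing writer in writeAsJson (this reuses the imports from the example above):

    public static void writeAsJson(List<Map<?, ?>> data, File file) throws IOException {
        ObjectMapper mapper = new ObjectMapper();
        // writerWithDefaultPrettyPrinter() indents the output instead of
        // writing the whole array on one line.
        mapper.writerWithDefaultPrettyPrinter().writeValue(file, data);
    }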

convert csv to json using jackson

Using the Jackson libraries, I figured out how to implement this in a stepwise fashion using JsonFactory and JsonGenerator. Thank you, everybody.

Code:

import java.io.StringWriter;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

import org.apache.camel.Exchange;
import org.apache.camel.Processor;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.core.util.DefaultPrettyPrinter;

public class JsonMapper implements Processor {

    private int noOfRows = 0;    // does not include the header record
    private int noOfColumns = 0; // includes the timestamp field

    private static Logger log = LoggerFactory.getLogger(JsonMapper.class.getName());

    @Override
    public void process(Exchange exchange) throws Exception {

        log.info("Entering Json Mapper Processor...");

        String body = exchange.getIn().getBody(String.class);
        List<String> serviceRecords = new ArrayList<String>(Arrays.asList(body.split(System.lineSeparator())));
        noOfRows = serviceRecords.size() - 1;
        noOfColumns = getColumns(serviceRecords);
        StringBuilder header = getHeader(serviceRecords);
        serviceRecords.remove(0); // remove the header record so only data records remain in the list
        String[] hdr = header.toString().split(",");
        // Example header record:
        //Noble_nst_mud-pump-2_NST_MUDPIT1_VOL,Noble_nst_mud-pump-2_NST_MUDPIT2_VOL,Noble_nst_mud-pump-2_NST_MUDPIT3_VOL,Noble_nst_mud-pump-2_NST_MUDPIT4_VOL,Noble_nst_mud-pump-2_NST_MUDPIT5_VOL,Noble_nst_mud-pump-2_NST_MUDPIT6_VOL,Noble_nst_mud-pump-2_NST_MUDPIT7_VOL,Noble_nst_mud-pump-2_NST_MUDPIT8_VOL,Noble_nst_mud-pump-2_NST_R_MUDPIT1_VOL,Noble_nst_mud-pump-2_NST_R_MUDPIT2_VOL,Noble_nst_mud-pump-2_NST_R_MUDPIT3_VOL,Noble_nst_mud-pump-2_NST_R_MUDPIT4_VOL,Noble_nst_mud-pump-2_NST_MP2_MOT_RUN_001,Noble_nst_mud-pump-2_NST_MP2_AI_021,Noble_nst_mud-pump-2_NST_MP2_AI_023,Noble_nst_mud-pump-2_NST_MP2_AI_022,Noble_nst_mud-pump-2_NST_MP2_AI_004,Noble_nst_mud-pump-2_NST_MP2_AI_005,Noble_nst_mud-pump-2_NST_MP2_AI_006,Noble_nst_mud-pump-2_NST_MP2_AI_031,Noble_nst_mud-pump-2_NST_MP2_AI_033,Noble_nst_mud-pump-2_NST_MP2_AI_032,Noble_nst_mud-pump-2_NST_MP2_AI_011,Noble_nst_mud-pump-2_NST_MP2_AI_012,Noble_nst_mud-pump-2_NST_MP2_AI_013,Noble_nst_mud-pump-2_NST_MP2_MOT_RUN_010,Noble_nst_mud-pump-2_NST_MP2_MOT_RUN_004,Noble_nst_mud-pump-2_NST_MP2_MOT_RUN_005,Noble_nst_mud-pump-2_NST_MP2_MOT_RUN_006,Noble_nst_mud-pump-2_NST_MP2_MOT_RUN_009
        // Example data record:
        //04/10/2018 07:55:10 PM, 1, 0, 2, 0, 81, 64, 64, 0, 0, 0, 0, 0, 0, 0, 0, 0, 289.4146, 288.65, 288.65, 0, 0, 0, 288.9146, 291.15, 289.2283, 0, 0, 0, 0, 0

        JsonFactory factory = new JsonFactory();
        StringWriter sw = new StringWriter();
        JsonGenerator generator = factory.createGenerator(sw);
        generator.setPrettyPrinter(new DefaultPrettyPrinter());

        generator.writeStartObject();     // {
        generator.writeFieldName("tags"); // {"tags":
        generator.writeStartArray();      // {"tags": [

        for (String rec : serviceRecords) {
            String[] data = rec.split(",");

            for (int i = 0; i < (noOfColumns - 1); ++i) { // header has 1 less field than a data record
                generator.writeStartObject();                        // {
                generator.writeStringField("tagId", hdr[i].trim());  // "tagId": "NST_MD1_CRANK_CASE"
                generator.writeFieldName("data");
                generator.writeStartArray();                         // "data": [
                generator.writeStartObject();                        // {
                generator.writeStringField("ts", data[0].trim());    // "ts": "2017-11-15T19:55:00"
                generator.writeStringField("v", data[i + 1].trim()); // "v": "100"
                generator.writeStringField("q", "3");                // "q": "3"
                generator.writeEndObject();                          // }
                generator.writeEndArray();                           // ]
                generator.writeEndObject();                          // }
            }
        }
        generator.writeEndArray();  // close the "tags" array
        generator.writeEndObject(); // close the root object
        generator.flush();
        generator.close();

        log.info("JSON: " + sw.toString());

        exchange.getIn().setBody(sw.toString());
        sw.close();
        log.info("Leaving Json Mapper Processor...");

    }

    public StringBuilder getHeader(List<String> serviceRecords) {
        // Build the header from the first record, skipping the timestamp column (index 0).
        log.debug("header record: " + serviceRecords.get(0));
        StringBuilder sb = new StringBuilder();
        String[] sa = serviceRecords.get(0).split(",");
        for (int j = 1; j < sa.length; ++j) {
            sb.append(sa[j]).append(",");
        }
        sb.deleteCharAt(sb.length() - 1); // drop the trailing comma
        return sb;
    }

    public int getColumns(List<String> serviceRecords) {
        // The column count is taken from the header record (includes the timestamp field).
        return serviceRecords.get(0).split(",").length;
    }

}
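
Based on the inline comments, the JSON this processor emits has the following shape (shown for a single tag and a single data record, using the illustrative values from the comments):

    {
      "tags" : [ {
        "tagId" : "NST_MD1_CRANK_CASE",
        "data" : [ {
          "ts" : "2017-11-15T19:55:00",
          "v" : "100",
          "q" : "3"
        } ]
      } ]
    }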

how to read a csv to a nested json with jackson java

You do not always have to deserialise CSV to a POJO structure and implement custom serialisers. In this case, you can also:

  • Deserialise the CSV to a Map
  • Group the rows by metric into the form metric -> [[...], [...]]
  • Convert that grouped Map into the target Map structure
  • Serialise the resulting Map to JSON

Example code could look like this:

import com.fasterxml.jackson.databind.MappingIterator;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.SerializationFeature;
import com.fasterxml.jackson.databind.json.JsonMapper;
import com.fasterxml.jackson.dataformat.csv.CsvMapper;
import com.fasterxml.jackson.dataformat.csv.CsvSchema;

import java.io.File;
import java.math.BigDecimal;
import java.util.Arrays;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Spliterator;
import java.util.Spliterators;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import java.util.stream.StreamSupport;

public class CsvApp {

    public static void main(String[] args) throws Exception {
        File csvFile = new File("./resource/test.csv").getAbsoluteFile();

        CsvMapper csvMapper = CsvMapper.builder().build();
        MappingIterator<Map> rows = csvMapper
                .readerWithSchemaFor(Map.class)
                .with(CsvSchema.emptySchema().withHeader())
                .readValues(csvFile);

        DataConverter converter = new DataConverter();
        List<Map<String, Object>> result = converter.toMetricDataPoints(rows);

        ObjectMapper jsonMapper = JsonMapper.builder()
                .enable(SerializationFeature.INDENT_OUTPUT)
                .build();

        jsonMapper.writeValue(System.out, result);
    }

}

class DataConverter {

    public List<Map<String, Object>> toMetricDataPoints(MappingIterator<Map> rows) {
        return toStream(rows)
                // group by metric -> [value, date]
                .collect(Collectors.groupingBy(map -> map.get("metric"),
                        Collectors.mapping(map -> Arrays.asList(toNumber(map.get("value")), toNumber(map.get("date"))),
                                Collectors.toList())))
                .entrySet().stream()
                // convert to a Map with the keys: metric + datapoints
                .map(entry -> {
                    Map<String, Object> res = new LinkedHashMap<>(4);
                    res.put("metric", entry.getKey());
                    res.put("datapoints", entry.getValue());

                    return res;
                }).collect(Collectors.toList());
    }

    private Stream<Map> toStream(MappingIterator<Map> rowIterator) {
        return StreamSupport.stream(Spliterators.spliteratorUnknownSize(rowIterator, Spliterator.ORDERED), false);
    }

    private long toNumber(Object value) {
        return new BigDecimal(Objects.toString(value, "0")).longValue();
    }
}

The above code prints:

[ {
"metric" : "temp_a",
"datapoints" : [ [ 622, 1477895624866 ], [ -3, 1477916224866 ], [ 365, 1477917224866 ] ]
}, {
"metric" : "temp_b",
"datapoints" : [ [ 861, 1477895624866 ], [ 767, 1477917224866 ] ]
} ]
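
For reference, the test.csv assumed by this example can be reconstructed from the output and the column names used in the code (metric, value, date); the column order is a guess:

    metric,value,date
    temp_a,622,1477895624866
    temp_a,-3,1477916224866
    temp_a,365,1477917224866
    temp_b,861,1477895624866
    temp_b,767,1477917224866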

As you can see, we used only basic Jackson functionality; the rest of the data manipulation is implemented with the Java 8 Stream API.

See also:

  1. Directly convert CSV file to JSON file using the Jackson library
  2. How to convert an iterator to a stream?
  3. Jackson JSON Deserialization: array elements in each line

Csv to Json conversion in java

In my opinion, you should create:

  1. Java class

       public class ProjectData {
           private String projectName; // for example "Blaze"
           private String fullName;    // for example "overallProjects/projectBasepath/Project1/Blaze"
           private List<ProjectData> children = new ArrayList<>();

           public String getParent() {
               int index = fullName.lastIndexOf("/");
               if (index > 0) {
                   return fullName.substring(0, index);
               } else {
                   return null;
               }
           }
           ...
       }
  2. Then create a Map<String, ProjectData> containing keys like

         "overallProjects", "overallProjects/projectBasepath"
  3. Then iterate over the CSV (the first row is the root element): save each project to the Map, look up its parent object in the Map, and add the new object to that parent's children list.

  4. Finally, save the root ProjectData to JSON (see the sketch after the draft below).

For example (a draft, not tested):

   Map<String, ProjectData> map = new HashMap<>();
   ProjectData root = null;
   while (isNextInCsv()) {
       ProjectData project = getNextProjectDataFromCSV();
       String parent = project.getParent();
       if (parent == null) {
           root = project;
       } else {
           map.get(parent).getChildren().add(project);
       }
       map.put(project.getFullName(), project);
   }
   if (root != null) {
       saveToJson(root);
   }
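
The saveToJson call above is left as a stub. A minimal sketch using Jackson's ObjectMapper could look like this, assuming ProjectData exposes the usual getters (the output file name is illustrative):

   public void saveToJson(ProjectData root) throws IOException {
       ObjectMapper mapper = new ObjectMapper();
       // Serialises root and, via the children list, the whole project tree.
       mapper.writerWithDefaultPrettyPrinter().writeValue(new File("projects.json"), root);
   }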

Convert csv data string to json - Java - Play framework

You can do this with the FasterXML Jackson library:

   File input = new File("input.csv");
try {
CsvSchema csv = CsvSchema.emptySchema().withHeader();
CsvMapper csvMapper = new CsvMapper();
MappingIterator<Map<?, ?>> mappingIterator = csvMapper.reader().forType(Map.class).with(csv).readValues(input);
List<Map<?, ?>> list = mappingIterator.readAll();
System.out.println(list);
} catch(Exception e) {
e.printStackTrace();
}

You have to add the jackson-dataformat-csv and jackson-databind JARs as dependencies.

Edit:
If the input is a String, you can pass the String instead of a File.
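
For example, a minimal sketch of the String variant (the sample CSV content is illustrative):

   String csvData = "name,age\nTom,25";
   CsvSchema csv = CsvSchema.emptySchema().withHeader();
   CsvMapper csvMapper = new CsvMapper();
   MappingIterator<Map<?, ?>> mappingIterator = csvMapper.reader().forType(Map.class).with(csv).readValues(csvData);
   List<Map<?, ?>> list = mappingIterator.readAll();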


