convert a flat json file to tree structure in javascript
You can rely on tracking object references and do this without any recursion. Using `Object.assign`, map each node to its list of children:
// Assuming that input is in `input`
// Index every node by its `index` field so link targets resolve in O(1).
const nodes = input.nodes.reduce((a, node) => {
  a[node.index] = { ...node, index: undefined };
  return a;
}, []);
// organize the links by their source: Map<sourceIndex, Array<targetNode>>
const links = input.links.reduce((a, link) => {
  // BUG FIX: the original call was missing the Map key argument and a
  // closing parenthesis — `Map#set` takes (key, value).
  return a.set(link.source, (a.get(link.source) || []).concat(nodes[link.target]));
}, new Map());
// Apply side effect of updating node children
nodes.forEach(node => Object.assign(node, {
  children: links.get(node.index),
}));
So I'm taking the list of nodes, and assigning to each (mutating the node itself — keep in mind this is a side effect) a new `children` array. Those children are all the links whose source is this node, and we use `Array#map` to convert each link's `target` ID into the actual node object we want.
Convert Flat JSON to JSON tree using Javascript
For something like this it is easiest to first create a nested object with the root node names as keys for top level which can be done with Array#reduce()
.
Then split the NodeName values into array and use another reduce() to walk through the parents to get specific parent to push to
Finally get the outer array from the main object using Object.values()
// Build a lookup object keyed by root node name, then walk each
// NodeParent path (segments separated by ';' or ':'), creating any
// missing intermediate nodes, and push the row as a child of the
// deepest parent found.
const map = data.reduce((acc, { Node, NodeParent }) => {
  if (!NodeParent) {
    // A row without a parent is a top-level (root) node.
    acc[Node] = { Node, children: [] };
  } else {
    const parent = NodeParent.split(/;|:/g).reduce((lookup, name) => {
      lookup[name] = lookup[name] || { Node: name, children: [] };
      return lookup[name];
    }, acc);
    parent.children.push({ Node });
  }
  return acc;
}, {});
const res = Object.values(map);
console.log(JSON.stringify(res, null, 4));
.as-console-wrapper { max-height: 100%!important;}
<script>const data = [{ "Node": "Node1", "NodeParent": "" },{ "Node": "Node1A", "NodeParent": "Node1" },{ "Node": "Node1B", "NodeParent": "Node1" },{ "Node": "Node2", "NodeParent": "" },{ "Node": "Node2A", "NodeParent": "Node2" },{ "Node": "Node2B", "NodeParent": "Node2" },{ "Node": "Node3", "NodeParent": "" },{ "Node": "Node3A", "NodeParent": "Node3" },{ "Node": "Node3A1", "NodeParent": "Node3;Node3A" },{ "Node": "Node3A2", "NodeParent": "Node3;Node3A" },{ "Node": "Node3A3", "NodeParent": "Node3;Node3A" },{ "Node": "Node3A1x", "NodeParent": "Node3;Node3A:Node3A1" },{ "Node": "Node3A1y", "NodeParent": "Node3;Node3A:Node3A1" },{ "Node": "Node3A1z", "NodeParent": "Node3;Node3A:Node3A1" },{ "Node": "Node3B", "NodeParent": "Node3" },{ "Node": "Node3B1", "NodeParent": "Node3;Node3B" },{ "Node": "Node3B2", "NodeParent": "Node3;Node3B" },{ "Node": "Node3B3", "NodeParent": "Node3;Node3B" },{ "Node": "Node3B1x", "NodeParent": "Node3;Node3B:Node3B1" },{ "Node": "Node3B1y", "NodeParent": "Node3;Node3B:Node3B1" },{ "Node": "Node3B1z", "NodeParent": "Node3;Node3B:Node3B1" }]</script>
How to convert flat multi-branch data to hierarchical JSON?
I suggest you to create a tree and take id === pid
as a root for the tree, which works for unsorted data.
How it works:
Basically, for every object in the array, it takes the `id` for building a new object, and the `pid` as the parent id under which the new object is nested. For example:
{ "id": 6, "pid": 4 }
It generates this property first with
id
:"6": {
"id": 6,
"pid": 4
}
and then with
pid
:"4": {
"children": [
{
"id": 6,
"pid": 4
}
]
},
and while all objects are similarly treated, we finally get a tree.
If
id === pid
, the root node is found. This is the object for the later return.
var data = [
      { "id": "f", "pid": "b", "name": "F" },
      { "id": "e", "pid": "c", "name": "E" },
      { "id": "d", "pid": "c", "name": "D" },
      { "id": "c", "pid": "b", "name": "C" },
      { "id": "a", "pid": "a", "name": "A" },
      { "id": "b", "pid": "a", "name": "B" }
    ],
    // Single-pass tree builder that works on unsorted data.
    // The node whose id equals its own pid is the root.
    tree = function (items) {
      var root,
          byId = Object.create(null); // id -> node, or placeholder holding early children
      items.forEach(function (node) {
        // Adopt any children that were collected under a placeholder
        // before this node itself was seen.
        node.children = byId[node.id] && byId[node.id].children;
        byId[node.id] = node;
        if (node.id === node.pid) {
          // Self-referencing node: remember it as the root to return.
          root = node;
        } else {
          byId[node.pid] = byId[node.pid] || {};
          byId[node.pid].children = byId[node.pid].children || [];
          byId[node.pid].children.push(node);
        }
      });
      return root;
    }(data);
console.log(tree);
Convert flat JSON file to hierarchical json data like flare.json [d3 example file]
Updated to use a generic, level-driven method
This should work for n
levels rather than just 2 or 3. You just need to specify which properties define which levels.
var data = [
    { "dep": "First Top", "name": "First child", "model": "value1", "size": "320" },
    { "dep": "First Top", "name": "First child", "model": "value2", "size": "320" },
    { "dep": "First Top", "name": "SECOND CHILD", "model": "value1", "size": "320" },
    { "dep": "Second Top", "name": "First Child", "model": "value1", "size": "320" }
];

// The output tree: a single root whose descendants are grouped by the
// properties listed in `levels`, in order.
var newData = { name: "root", children: [] },
    levels = ["dep", "name"];

// For each data row, walk the output tree one level at a time,
// creating branches on demand.
data.forEach(function (row) {
    // Cursor into the children array of the current tree level.
    var cursor = newData.children;
    levels.forEach(function (property, depth) {
        // Reuse an existing branch with this name if already created.
        var index = cursor.findIndex(function (child) {
            return row[property] == child.name;
        });
        if (index === -1) {
            cursor.push({ name: row[property], children: [] });
            index = cursor.length - 1;
        }
        // Descend into the (possibly new) branch.
        cursor = cursor[index].children;
        // At the deepest configured level, attach the leaf record.
        if (depth === levels.length - 1) {
            cursor.push({ name: row.model, size: row.size });
        }
    });
});
Node JS: Make a flat json from a tree json
I'll start with the finished solution...
There's a big explanation at the end of this answer. Let's try to think big-picture for a little bit first tho.
// Read the current directory, keep only *.json files, read and parse each
// one, merge all entries into a single id -> defaultMessage object, and
// write the pretty-printed result to result.json.
readdirp('.')
.fmap(filter(match(/\.json$/)))
.fmap(map(readfilep))
.fmap(map(fmap(JSON.parse)))
.fmap(concatp)
.fmap(flatten)
.fmap(reduce(createMap)({}))
.fmap(data=> JSON.stringify(data, null, '\t'))
.fmap(writefilep(resolve(__dirname, 'result.json')))
.then(filename=> console.log('wrote results to %s', filename), err=>console.error(err));
Console output
wrote results to /path/to/result.json
result.json
(I added a c.json
with some data to show that this works with more than 2 files)
{
"addEmoticon1": "Hello, {name}!",
"addPhoto1": "How are you??",
"close1": "Close!",
"somethingelse": "Something!"
}
Implementation
I made Promise
-based interfaces for readdir
and readFile
and writeFile
import {readdir, readFile, writeFile} from 'fs';

// Promise-based adapters around the callback-style fs API.

// readdirp : String -> Promise<Array<String>>
// Resolves with the directory entries resolved against `dir`.
const readdirp = dir =>
  new Promise((ok, notOk) =>
    readdir(dir, (error, filenames) => {
      if (error) {
        notOk(error);
      } else {
        ok(mapResolve(dir)(filenames));
      }
    }));

// readfilep : String -> Promise<String> (utf8 contents)
const readfilep = path =>
  new Promise((ok, notOk) =>
    readFile(path, 'utf8', (error, data) => {
      if (error) {
        notOk(error);
      } else {
        ok(data);
      }
    }));

// writefilep : String -> Value -> Promise<String>
// Fulfils with the path that was written, for easy chaining.
const writefilep = path => data =>
  new Promise((ok, notOk) =>
    writeFile(path, data, error => {
      if (error) {
        notOk(error);
      } else {
        ok(path);
      }
    }));
In order to map functions to our Promises, we needed an fmap
utility. Notice how we take care to bubble errors up.
// Lifts a plain function onto a Promise: applies `f` to the fulfilled
// value; rejections pass through untouched (errors "bubble up").
// NOTE(review): extending a native prototype is generally discouraged;
// kept here because the whole pipeline in this answer relies on `.fmap`.
Promise.prototype.fmap = function fmap(f) {
return new Promise((pass,fail) =>
this.then(x=> pass(f(x)), fail));
};
And here's the rest of the utilities
// fmap : (a->b) -> F<a> -> F<b> — delegates to the container's own .fmap.
const fmap = fn => container => container.fmap(fn);
// mapResolve : String -> Array<String> -> Array<String>
// Resolves each filename against `dir`.
const mapResolve = dir => map(name => resolve(dir, name));
// Curried wrappers around the Array methods.
const map = fn => list => list.map(item => fn(item));
const filter = pred => list => list.filter(item => pred(item));
// match : RegExp -> String -> Boolean
const match = pattern => text => pattern.test(text);
// concatp : Array<Promise<a>> -> Promise<Array<a>> (just Promise.all)
const concatp = promises => Promise.all(promises);
// reduce : curried left fold over an array.
const reduce = step => initial => list =>
  list.reduce((acc, item) => step(acc)(item), initial);
// flatten : recursively flattens arbitrarily nested arrays.
const flatten = reduce(acc => item =>
  acc.concat(Array.isArray(item) ? flatten(item) : item))([]);
Lastly, the one custom function that does your work
// createMap : Object -> Object -> Object
// Folds one translation record into the accumulator, keyed by its id.
// NOTE: mutates and returns the accumulator object.
const createMap = acc => ({ id, defaultMessage }) =>
  Object.assign(acc, { [id]: defaultMessage });
And here's c.json
[
{
"id": "somethingelse",
"description": "something",
"defaultMessage": "Something!"
}
]
"Why so many little functions ?"
Well despite what you may think, you have a pretty big problem. And big problems are solved by combining several small solutions. The most prominent advantage of this code is that each function has a very distinct purpose and it will always produce the same results for the same inputs. This means each function can be used other places in your program. Another advantage is that smaller functions are easier to read, reason with, and debug.
Compare all of this to the other answers given here; @BlazeSahlen's in particular. That's over 60 lines of code that's basically only usable to solve this one particular problem. And it doesn't even filter out non-JSON files. So the next time you need to create a sequence of actions on reading/writing files, you'll have to rewrite most of those 60 lines each time. It creates lots of duplicated code and hard-to-find bugs because of exhausting boilerplate. And all that manual error-handling... wow, just kill me now. And he/she thought callback hell was bad ? haha, he/she just created yet another circle of hell all on his/her own.
All the code together...
Functions appear (roughly) in the order they are used
import {readdir, readFile, writeFile} from 'fs';
import {resolve} from 'path';
// logp: Promise<Value> -> Void
// Logs the fulfilled value; errors go to stderr.
// BUG FIX: `console.err` is not a function — the rejection handler itself
// threw a TypeError. The correct API is `console.error`.
const logp = p=> p.then(x=> console.log(x), x=> console.error(x));
// fmap : Promise<a> -> (a->b) -> Promise<b>
// Applies `f` to the fulfilled value; rejections pass through untouched.
// NOTE(review): extending a native prototype (Promise) is generally
// discouraged; kept because the pipeline below depends on `.fmap`.
Promise.prototype.fmap = function fmap(f) {
return new Promise((pass,fail) =>
this.then(x=> pass(f(x)), fail));
};
// fmap : (a->b) -> F<a> -> F<b>
const fmap = f=> x=> x.fmap(f);
// readdirp : String -> Promise<Array<String>>
const readdirp = dir=>
new Promise((pass,fail)=>
readdir(dir, (err, filenames) =>
err ? fail(err) : pass(mapResolve (dir) (filenames))));
// mapResolve : String -> Array<String> -> Array<String>
// (resolves each filename against `dir`)
const mapResolve = dir=> map(x=>resolve(dir,x));
// map : (a->b) -> Array<a> -> Array<b>
const map = f=> xs=> xs.map(x=> f(x));
// filter : (Value -> Boolean) -> Array<Value> -> Array<Value>
const filter = f=> xs=> xs.filter(x=> f(x));
// match : RegExp -> String -> Boolean
const match = re=> s=> re.test(s);
// readfilep : String -> Promise<String>
const readfilep = path=>
new Promise((pass,fail)=>
readFile(path, 'utf8', (err,data)=>
err ? fail(err) : pass(data)));
// concatp : Array<Promise<Value>> -> Promise<Array<Value>>
// (just Promise.all — note it returns a Promise, not a bare Array)
const concatp = xs=> Promise.all(xs);
// reduce : (b->a->b) -> b -> Array<a> -> b
const reduce = f=> y=> xs=> xs.reduce((y,x)=> f(y)(x), y);
// flatten : Array<Array<Value>> -> Array<Value> (recursive, any depth)
const flatten = reduce(y=> x=> y.concat(Array.isArray(x) ? flatten (x) : x)) ([]);
// writefilep : String -> Value -> Promise<String>
// (fulfils with the path that was written, for easy chaining)
const writefilep = path=> data=>
new Promise((pass,fail)=>
writeFile(path, data, err=>
err ? fail(err) : pass(path)));
// -----------------------------------------------------------------------------
// createMap : Object -> Object -> Object
// NOTE: mutates and returns the accumulator `map`.
const createMap = map=> ({id, defaultMessage})=>
Object.assign(map, {[id]: defaultMessage});
// do it !
// Pipeline: read '.', keep *.json files, read and parse each, merge all
// entries into one object via createMap, pretty-print, write result.json,
// then report success or failure.
readdirp('.')
.fmap(filter(match(/\.json$/)))
.fmap(map(readfilep))
.fmap(map(fmap(JSON.parse)))
.fmap(concatp)
.fmap(flatten)
.fmap(reduce(createMap)({}))
.fmap(data=> JSON.stringify(data, null, '\t'))
.fmap(writefilep(resolve(__dirname, 'result.json')))
.then(filename=> console.log('wrote results to %s', filename), err=>console.error(err));
Still having trouble following along?
It's not easy to see how these things work at first. This is a particularly squirrely problem because the data gets nested very quickly. Thankfully that doesn't mean our code has to be a big nested mess just to solve the problem ! Notice the code stays nice and flat even when we're dealing with things like a Promise of an Array of Promises of JSON...
// Here we are reading directory '.'
// We will get a Promise<Array<String>>
// Let's say the files are 'a.json', 'b.json', 'c.json', and 'run.js'
// Promise will look like this:
// Promise<['a.json', 'b.json', 'c.json', 'run.js']>
readdirp('.')
// Now we're going to strip out any non-JSON files
// Promise<['a.json', 'b.json', 'c.json']>
.fmap(filter(match(/\.json$/)))
// call `readfilep` on each of the files
// We will get Promise<Array<Promise<JSON>>>
// Don't freak out, it's not that bad!
// Promise<[Promise<JSON>, Promise<JSON>, Promise<JSON>]>
.fmap(map(readfilep))
// for each file's Promise, we want to parse the data as JSON
// JSON.parse returns an object, so the structure will be the same
// except JSON will be an object!
// Promise<[Promise<Object>, Promise<Object>, Promise<Object>]>
.fmap(map(fmap(JSON.parse)))
// Now we can start collapsing some of the structure
// `concatp` (Promise.all) will convert Array<Promise<Value>> to Promise<Array<Value>>
// We will get
// Promise<[Object, Object, Object]>
// Remember, we have 3 Objects; one for each parsed JSON file
.fmap(concatp)
// Your particular JSON structures are Arrays, which are also Objects
// so that means `concatp` will actually return Promise<[Array, Array, Array]>
// but we'd like to flatten that
// that way each parsed JSON file gets mushed into a single data set
// after flatten, we will have
// Promise<Array<Object>>
.fmap(flatten)
// Here's where it all comes together
// now that we have a single Promise of an Array containing all of your objects ...
// We can simply reduce the array and create the mapping of key:values that you wish
// `createMap` is custom tailored for the mapping you need
// we initialize the `reduce` with an empty object, {}
// after it runs, we will have Promise<Object>
// where Object is your result
.fmap(reduce(createMap)({}))
// It's all downhill from here
// We currently have Promise<Object>
// but before we write that to a file, we need to convert it to JSON
// JSON.stringify(data, null, '\t') will pretty print the JSON using tab to indent
// After this, we will have Promise<JSON>
.fmap(data=> JSON.stringify(data, null, '\t'))
// Now that we have a JSON, we can easily write this to a file
// We'll use `writefilep` to write the result to `result.json` in the current working directory
// I wrote `writefilep` to pass the filename on success
// so when this finishes, we will have
// Promise<Path>
// You could have it return Promise<Void> like writeFile sends void to the callback. up to you.
.fmap(writefilep(resolve(__dirname, 'result.json')))
// the grand finale
// alert the user that everything is done (or if an error occurred)
// Remember `.then` is like a fork in the road:
// the code will go to the left function on success, and the right on failure
// Here, we're using a generic function to say we wrote the file out
// If a failure happens, we write that to console.error
.then(filename=> console.log('wrote results to %s', filename), err=>console.error(err));
All done !
Build tree array from flat array in javascript
There is an efficient solution if you use a map-lookup. If the parents always come before their children you can merge the two for-loops. It supports multiple roots. It gives an error on dangling branches, but can be modified to ignore them. It doesn't require a 3rd-party library. It's, as far as I can tell, the fastest solution.
/**
 * Builds a forest (array of root nodes) from a flat list in O(n).
 *
 * Two passes, so the input does not need to be sorted: pass 1 indexes
 * every node by id and resets `children`; pass 2 links each node to its
 * parent, or collects it as a root when its parentId is "0".
 *
 * @param {Array<{id:string, parentId:string}>} list - flat node records (mutated in place)
 * @returns {Array<Object>} the root nodes, each with populated `children`
 * @throws {Error} when a node references a parentId not present in the list
 */
function list_to_tree(list) {
  var map = {}, node, parent, roots = [], i;
  for (i = 0; i < list.length; i += 1) {
    map[list[i].id] = i; // initialize the map: id -> index in `list`
    list[i].children = []; // initialize the children
  }
  for (i = 0; i < list.length; i += 1) {
    node = list[i];
    if (node.parentId !== "0") {
      parent = list[map[node.parentId]];
      // Guard against dangling branches: fail with a clear message
      // instead of "cannot read property 'children' of undefined".
      if (parent === undefined) {
        throw new Error('list_to_tree: unknown parentId "' + node.parentId + '" on node "' + node.id + '"');
      }
      parent.children.push(node);
    } else {
      roots.push(node);
    }
  }
  return roots;
}
// Sample input: a flat, unsorted-friendly list where parentId "0"
// marks the root nodes; `children` is re-initialized by list_to_tree.
var entries = [{
"id": "12",
"parentId": "0",
"text": "Man",
"level": "1",
"children": null
},
{
"id": "6",
"parentId": "12",
"text": "Boy",
"level": "2",
"children": null
},
{
"id": "7",
"parentId": "12",
"text": "Other",
"level": "2",
"children": null
},
{
"id": "9",
"parentId": "0",
"text": "Woman",
"level": "1",
"children": null
},
{
"id": "11",
"parentId": "9",
"text": "Girl",
"level": "2",
"children": null
}
];
console.log(list_to_tree(entries));
How to convert json to tree array in JS?
You could take an iterative and recursive approach.
function getNodes(object) { return Object .entries(object) .map(([key, value]) => value && typeof value === 'object' ? { title: key, key, children: getNodes(value) } : { title: key, key, value } );}
// Demo: nested plain object in, array of {title, key, children|value} nodes out.
const data = { parent1: { child1: { bar: "1" }, child2: "2" }, parent2: { child1: "1" } }, result = getNodes(data);
console.log(result);
.as-console-wrapper { max-height: 100% !important; top: 0; }
How to convert parent child array to json tree structure by javascript
You can perform a reduce
operation on the array using an object to store the references to each object (for adding children to) and an array to store the result.
const arr = [
  {"id": 123, "parentid": 0, "name": "Mammals"},
  {"id": 456, "parentid": 123, "name": "Dogs"},
  {"id": 214, "parentid": 456, "name": "Labradors"},
  {"id": 810, "parentid": 456, "name": "Pugs"},
  {"id": 919, "parentid": 456, "name": "Terriers"}
];

// Single pass: `parentMap` remembers every node by id so later rows can
// attach themselves as children; rows whose parent hasn't been seen yet
// (here: only the root, parentid 0) land directly in `res`.
// NOTE: relies on parents appearing before their children in the array.
const { res } = arr.reduce((state, node) => {
  const parent = state.parentMap[node.parentid];
  if (parent) {
    // Create the children array lazily, on the first child.
    if (!parent.children) {
      parent.children = [];
    }
    parent.children.push(node);
  } else {
    state.res.push(node);
  }
  state.parentMap[node.id] = node;
  return state;
}, { parentMap: {}, res: [] });
console.log(res);
Related Topics
How to Implement Ping/Pong Request for Websocket Connection Alive in JavaScript
How to Align Text Input Correctly in React Native
Document.Queryselector Always Returns Null
Fix: Js Recursive Function to Get the Nested (Multilevel) Child Objects as Array of Objects
Getting Selected Values from Dropdown Inside Table
Disable Scrolling When Touch Moving Certain Element
Edit Table Row Inline on Click of Edit in Angular
How to Move the Mouse Pointer and Click With Selenium Webdriver JavaScript
Exclude Weekends in JavaScript Date Calculation
Angular 4 - Cannot Read Property Status of Null While Displaying Validation Error
Convert a Flat Json File to Tree Structure in JavaScript
How to Pass Value into Href Link of a Bootstrap Modal
Bootstrap 4 Navbar-Toggler-Icon Does Not Appear
Javascript, Track Iframes Redirecting Top Window
This.Props.History.Push Works in Some Components and Not Others
Will a React Component Re-Render If Its Props Are Updated, But Don't Change in Value
Callback Function Cannot Access Variable Within Parent Function'S Scope