| Bytes | Lang | Time | Link |
|---|---|---|---|
| nan | Go | 250529T072658Z | Ahamad |
| nan |  | 230714T102300Z | Ed The & |
| 011 | GNU AWK | 230620T064127Z | jared_ma |
| 715 | Go | 230622T112125Z | Snaddyvi |
| nan |  | 230617T164337Z | Arnauld |
| 000 | C | 230618T220038Z | AnttiP |
| nan |  | 230619T022859Z | tsh |
| 035 | Retina 0.8.2 | 230619T003458Z | Neil |
| nan |  | 230618T003021Z | 138 Aspe |
| nan |  | 230618T143429Z | user1183 |
| 621 | JavaScript Node.js | 230618T122816Z | noodle p |
Go, with sonic
package main
import (
"bufio"
"fmt"
"os"
"github.com/bytedance/sonic"
)
// main reads JSON documents from stdin, one per line, and prints the
// maximum nesting depth of each. A line that fails to parse is treated
// as null, which search reports as depth 0. A read error is reported
// on stderr and the process exits with a non-zero status.
func main() {
	scanner := bufio.NewScanner(os.Stdin)
	for scanner.Scan() {
		line := scanner.Text()
		var jsonValue interface{}
		if err := sonic.Unmarshal([]byte(line), &jsonValue); err != nil {
			// Invalid JSON falls back to nil (depth 0).
			jsonValue = nil
		}
		fmt.Println(search(jsonValue))
	}
	if err := scanner.Err(); err != nil {
		fmt.Fprintf(os.Stderr, "Error reading input: %v\n", err)
		// Fix: previously the program exited 0 even after a read error.
		os.Exit(1)
	}
}
// search returns the nesting depth of a decoded JSON value: 0 for
// scalars (strings, numbers, booleans, nil), and 1 plus the deepest
// child for objects and arrays (so an empty container has depth 1).
func search(node interface{}) uint32 {
	var deepest uint32
	switch val := node.(type) {
	case map[string]interface{}:
		for _, child := range val {
			if d := search(child); d > deepest {
				deepest = d
			}
		}
		return deepest + 1
	case []interface{}:
		for _, child := range val {
			if d := search(child); d > deepest {
				deepest = d
			}
		}
		return deepest + 1
	default:
		return 0
	}
}
go env -w GO111MODULE=on # enable go modules globally (go ver>=1.13)
go mod init my_module
# Edit golang file(s)
go get
go run main.go
[]
1
[1, 2, 3, 4, 5]
1
[ [ 123], []]
2
[{"a": []}, ["abc"]]
3
{"a": 321}
1
{"a": []}
2
"hello"
0
["abc[]bca"]
1
Racket
#lang typed/racket/base
(require typed/json)
#|
Calculate the maximum depth of a JSON object read from input
Algorithm:
1. Read user input and parse JSON.
2. If input has no data, return 0.
3. Begin `max-depth` with the parsed JSON data and depth of 0.
a. If the JSON input is a list, loop over each element of the list and pass
the element as an input for `max-depth` and increment the depth. Repeat step 3.
b. If the JSON input is a hash table, use each value of the table as the input list
for step 3.a.
c. If the JSON input is a number, string, boolean, or null, return the depth.
4. Print the maximum depth.
|#
;; max-depth : JSExpr Real -> Real
;; Walk a parsed JSON value and return its maximum nesting depth.
;; Lists and hash tables each add one level (`depth+1`, which is also
;; the answer for an empty container); scalars return the accumulated
;; depth unchanged.
(: max-depth (-> JSExpr Real Real))
(define (max-depth input depth)
(let ([depth+1 (+ depth 1)])
;; A JSON array: the result is the deepest of this level and all
;; recursive results over the elements.
(cond [(list? input)
(apply max
(cons depth+1
(map (λ ([elem : JSExpr]) : Real
(max-depth elem depth+1))
input)))]
;; A JSON object: same idea, recursing over the hash-table values
;; (the keys are Symbols and carry no depth of their own).
[(hash? input)
(apply max
(cons depth+1
(hash-map input
(λ ([_ : Symbol] [value : JSExpr]) : Real
(max-depth value depth+1)))))]
;; Numbers, strings, booleans, and null: depth stays as-is.
[else depth])))
;; main : (U JSExpr EOF) -> Real
;; Depth of the parsed input; empty input (eof from read-json) maps to 0.
(: main (-> (U JSExpr EOF) Real))
(define (main input)
(cond [(equal? eof input) 0]
[else (max-depth input 0)]))
;; Entry point: parse stdin as JSON and print its maximum depth.
(displayln (main (read-json)))
#| Tests [Remove line to run after input, if all pass, nothing will display.]
(require (only-in typed/rackunit check-equal?))
(check-equal? (main (string->jsexpr "[]")) 1)
(check-equal? (main (string->jsexpr "[1, 2, 3, 4, 5]")) 1)
(check-equal? (main (string->jsexpr "[ [ 123], []]")) 2)
(check-equal? (main (string->jsexpr "[{\"a\": []}, [\"abc\"]]")) 3)
(check-equal? (main (string->jsexpr "{\"a\": 321}")) 1)
(check-equal? (main (string->jsexpr "{\"a\": []}")) 2)
(check-equal? (main (string->jsexpr "\"hello\"")) 0)
(check-equal? (main (string->jsexpr "[\"abc[]bca\"]")) 1)
Tests [Remove line to run after input, if all pass, nothing will display.] |#
Compilation
Once Racket is installed on your system, simply run:
raco exe /path/to/this_file.rkt
Best compiled without the tests.
Explanation
Racket provides a nice built-in package called json that parses JSON expressions (JSExpr). All lists in the JSON expression are parsed into Racket Lists and all dictionaries are parsed into Hash Tables where the keys are Symbols and the values are other JSExprs. The reason I am using Typed Racket is that it is potentially faster than the dynamically typed language.
Conclusion
This is my submission for the challenge, I am not that great at optimizing code, so I don't expect peak performance in its runs. I did do a sample test on my machine with a 64kB minified JSON file which ran (according to time) at about 700ms. As a note though, my CPU is Intel 2 Duo E8500 and 2GB RAM, so I think it should be faster on other machines :)
Have an amazing weekend!
GNU AWK, ~11ms
# Print the maximum bracket-nesting depth of one JSON value per line.
BEGIN {
# FPAT: treat each quoted run (with doubled quotes inside) as a single
# field, and every other character as its own one-character field.
FPAT = "(\"([^\"])*\")+|."
m = 0
}
{
printf "%s -> ", $0
# NOTE(review): appears to strip string segments delimited by escaped
# quotes so their brackets are not counted — the pattern relies on
# shell-style quoting; verify against the invocation supplying this text.
gsub(/'\\'\"[^'\\\"']*'\\'\"/, "", $0)
# Collapse \" escapes into doubled quotes so FPAT can pair them up.
gsub(/\\"/, "\"\"", $0)
for (i = 1; i <= NF; i++) {
if ($i == "[" || $i == "{") {
# Opening bracket outside a string field: one level deeper.
a++
m = (a > m ? a : m)
} else if ($i == "]" || $i == "}") {
a--
}
}
# m now holds the deepest nesting seen on this line.
print m
m = 0
}
Result:
123 -> 0
[] -> 1
[1, 2, 3, 4, 5] -> 1
[ [ 123], []] -> 2
[{"a": []}, ["abc"]] -> 3
{"a": 321} -> 1
{"a": []} -> 2
"hello" -> 0
["abc[]bca"] -> 1
["\",[],\""] -> 1
["",[],""] -> 2
["\"",[],""] -> 2
["\"",[],"\""] -> 2
"[\\" -> 0
Go, 715 bytes
package main
import (
"bufio"
"fmt"
"os"
)
// calcDepth scans one JSON document given as a single line and reports
// its maximum bracket/brace nesting depth. Brackets that appear inside
// string literals are ignored, backslash escapes (including \" and \\)
// are honoured, and a bare scalar yields 0.
func calcDepth(data string) int {
	var cur, best int
	inStr, esc := false, false
	for _, ch := range data {
		switch {
		case ch == '\\':
			// Toggling handles runs of backslashes: \\ cancels itself.
			esc = !esc
			continue
		case ch == '"':
			if !esc {
				inStr = !inStr
			}
		case !inStr && (ch == '{' || ch == '['):
			if cur++; cur > best {
				best = cur
			}
		case !inStr && (ch == '}' || ch == ']'):
			if cur > 0 {
				cur--
			}
		}
		// Any non-backslash character clears a pending escape.
		esc = false
	}
	return best
}
// main reads the first line from stdin (empty if there is none) and
// prints its maximum nesting depth.
func main() {
	in := bufio.NewScanner(os.Stdin)
	var line string
	if in.Scan() {
		line = in.Text()
	}
	fmt.Println(calcDepth(line))
}
JavaScript (Node.js)
I honestly have no idea how fast it is.
EDIT: Since the challenge is no longer asking for JSON validation, using JSON.parse() may now seem a bit overkill. But this built-in is apparently reasonably well optimized and still faster than a simpler 100% JS parser.
const readline = require('readline');

// Line-oriented interface over stdin; terminal mode off so piped input works.
const rl = readline.createInterface({
  input: process.stdin,
  output: process.stdout,
  terminal: false
});

// Depth of a parsed JSON value: 0 for scalars and null, otherwise
// 1 + the deepest child (an empty array/object therefore counts as 1).
const depthOf = value => {
  if (typeof value !== 'object' || value === null) return 0;
  // Arrays are walked directly; plain objects via their values.
  const children = Array.isArray(value) ? value : Object.values(value);
  if (children.length === 0) return 1;
  return 1 + Math.max(...children.map(depthOf));
};

rl.on('line', line => {
  let parsed = null;
  try {
    // Attempt to parse the line as JSON.
    parsed = JSON.parse(line);
  } catch (err) {
    // Unparsable input behaves like null, whose depth is 0.
  }
  console.log(depthOf(parsed));
  // Only one line of input is expected, so terminate immediately.
  process.exit();
});
C, based on Philx0's answer
I tried to remove a few branches. Instead of storing state in a variable I store it... in the state. Namely, there are two loops depending on if we are parsing a string or not. The rest of the comparisons should be branchless when compiled with -O3.
Edit: Slightly improved version. It's possible to also remove most checks for "\n" by keeping track of whether the current depth is positive, though I'm not sure how much it would improve performance.
#include <unistd.h>
#include <stdio.h>
#define BUFSIZE 4096
#define unlikely(x) __builtin_expect((x),0)
/*
 * Branch-minimized depth counter: instead of testing an in-string flag
 * for every character, the parser state is encoded in control flow —
 * one loop runs while outside a string literal, another while inside,
 * and the two gotos jump between them mid-iteration when a '"' is seen.
 */
int main(void) {
int depth = 0;     /* current nesting level */
int maxdepth = 0;  /* deepest level observed so far */
char buf[BUFSIZE];
ssize_t cnt;       /* bytes returned by the current read() */
ssize_t i;
ssize_t skip = 0;  /* carry-over: bytes of the next buffer to skip */
int str = 0;       /* 1 = resume inside a string on the next buffer */
while ((cnt = read(0,buf,BUFSIZE))) {
i = skip;
if (!str) {
/* Outside any string literal. */
for (; i < cnt; i++) {
if (buf[i] == '"') goto instring;
if (unlikely(buf[i] == '\n')) goto print; /* end of document */
if (depth > maxdepth) maxdepth = depth;
/* Branchless update: each comparison evaluates to 0 or 1.
 * NOTE(review): maxdepth is sampled before the adjustment, so a
 * peak is recorded on the following character — this relies on
 * every opener being followed by at least its matching closer. */
depth+= buf[i] == '{' || buf[i] == '[';
depth-= buf[i] == '}' || buf[i] == ']';
outstring:;  /* jump target when a closing '"' ends a string */
}
str = 0;
} else {
/* Inside a string literal: only look for its end, skipping escapes. */
for (; i < cnt; i++) {
if (buf[i] == '"') goto outstring;
if (unlikely(buf[i] == '\\')) i+=1; /* skip the escaped character */
instring:;   /* jump target when an opening '"' starts a string */
}
str = 1;
}
/* Normally i == cnt so skip is 0; if an escape consumed the last byte
 * of the buffer, i == cnt + 1 and the first byte of the next read is
 * skipped. */
skip = i - cnt;
}
print:
printf("%d\n", maxdepth);
return 0;
}
JavaScript (Node.js)
// Cache the built-ins used inside the hot reviver callback as locals.
const isArray = Array.isArray, objectValues = Object.values, max = Math.max;
require("readline").createInterface({ input: process.stdin, output: process.stdout, terminal: false }).on("line", str => {
// JSON.parse's reviver runs bottom-up: by the time a parent is visited,
// each of its children has already been replaced by its depth number,
// so the final result of the parse IS the document's maximum depth.
const depth = JSON.parse(str, (key, value) => {
// Scalars and null have depth 0.
if (value === null || typeof value !== 'object') return 0;
// Containers: collect child depths (array elements or object values).
const arr = isArray(value) ? value : objectValues(value);
// -~x === x + 1. For an empty container max() returns -Infinity,
// which ~ coerces via ToInt32 to 0, so -~(-Infinity) === 1 — exactly
// the depth an empty array/object should contribute.
const depth = -~max(...arr);
return depth;
});
console.log(depth);
// Only one line of input is expected.
process.exit();
});
The readline part is based on other answers.
In case you are interested, it can be golfed into 70 bytes; however, this question is not tagged as code-golf.
s=>JSON.parse(s,(_,v)=>Object(v)===v&&-~Math.max(...Object.values(v)))
Retina 0.8.2, 35 bytes
"(\\?.)*?"
T`[]`{}
[^{}]
+`}{
..
Try it online! Link includes test suite. Probably not going to win any awards for speed. Explanation:
"(\\?.)*?"
Delete all string literals.
T`[]`{}
Change square brackets into braces.
[^{}]
Delete everything other than braces.
+`}{
Merge adjacent containers at the same depth.
..
Count the maximum depth.
Rust
use serde_json crate. I don't know how fast it is.
Update 2023-06-19: In light of the comment from @lights0123, you can use cargo build --release instead of cargo build. It would likely be significantly faster in release mode.
src/main.rs
use std::io::{self, Read, BufRead};
use serde_json::Value;
use std::process;
/// Read JSON documents from stdin, one per line, and print the maximum
/// nesting depth of each. Unparsable lines fall back to `Value::Null`,
/// whose depth is 0.
fn main() -> io::Result<()> {
    let input = io::stdin();
    for read_result in input.lock().lines() {
        let text = read_result?;
        let parsed: Value = serde_json::from_str(&text).unwrap_or(Value::Null);
        println!("{}", search(&parsed));
    }
    process::exit(0);
}
/// Nesting depth of a JSON value: 0 for scalars, and 1 plus the deepest
/// child for objects and arrays (so an empty container has depth 1,
/// since `max()` over no children is `None` and defaults to 0).
fn search(node: &Value) -> u32 {
    match node {
        Value::Object(map) => 1 + map.values().map(search).max().unwrap_or(0),
        Value::Array(arr) => 1 + arr.iter().map(search).max().unwrap_or(0),
        _ => 0,
    }
}
Cargo.toml
[package]
name = "rust_hello"
version = "0.1.0"
edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
serde_json = "1.0"
Build and running
# win10 environment
$ cargo build --release
$ target\release\rust_hello.exe
[{"a": []}, ["abc"]]
3
hello
0
{"a": []}
2
{"a": 321}
1
[ [ 123], []]
2
[1, 2, 3, 4, 5]
1
[]
1
[[]
0
A rewrite of noodle man’s solution in C
#include <stdio.h>
#define BUFSIZE 1024
/*
 * Read one JSON document (the first input line) from stdin and print
 * its maximum bracket/brace nesting depth. Quoted sections and
 * backslash escapes are skipped so brackets inside strings don't count.
 * State persists across fgets() calls, so lines longer than BUFSIZE
 * are handled correctly.
 */
int main(void) {
    int inStr = 0;  /* inside a string literal */
    int esc = 0;    /* previous character was a backslash */
    int depth = 0;  /* current nesting level */
    int best = 0;   /* deepest level seen */
    char line[BUFSIZE];
    while (fgets(line, BUFSIZE, stdin)) {
        for (char *p = line; *p; ++p) {
            if (*p == '\n') goto report;     /* end of the document */
            if (esc) { esc = 0; continue; }  /* skip the escaped char */
            if (*p == '\\') {
                esc = 1;
            } else if (*p == '"') {
                inStr = !inStr;
            } else if (!inStr) {
                if (*p == '{' || *p == '[') {
                    if (++depth > best) best = depth;
                } else if (*p == '}' || *p == ']') {
                    depth--;
                }
            }
        }
    }
report:;
    printf("%d\n", best);
}
JavaScript (Node.js), 621 bytes
require("readline").createInterface({ input: process.stdin, output: process.stdout, terminal: false }).on("line", file => {
let inString = false;
let prevBackslash = false;
let depth = 0;
let maxDepth = 0;
for (const char of file) {
if (prevBackslash) {
prevBackslash = false;
continue;
}
if (char === "\\") prevBackslash = true;
if (char === '"' && !prevBackslash) inString = !inString
if (inString) continue;
if ("{[".includes(char)) {
if (++depth > maxDepth) maxDepth++;
}
if ("]}".includes(char)) depth--;
}
console.log(maxDepth);
process.exit();
});
This should be faster since it doesn't check if the JSON is valid, which is no longer required. If checking the JSON was valid was required, a solution like this manually verifying validity while checking the depth might still be faster, but I don't feel like writing a JSON parser.
All this does is loop over each character of the input, ignoring strings, incrementing a depth counter whenever it encounters a { or [, decrementing the counter when it encounters a } or ], and keeping track of the maximum depth.