
jq: building a CSV from aggregated JSON

I am trying to parse aggregated Elasticsearch results with jq in order to build a CSV, but getting the output I need is proving really difficult, so I hope someone can help. (The timestamp is in 'key_as_string'.) I actually get results like these for every day; the sample below is slightly truncated. This is the JSON I have:

[ 
    { 
    "key_as_string": "2017-09-01T00:00:00.000+02:00", 
    "key": 1506808800000, 
    "doc_count": 5628, 
    "agg1": { 
     "doc_count_error_upper_bound": 5, 
     "sum_other_doc_count": 1193, 
     "buckets": [ 
     { 
      "key": "value3", 
      "doc_count": 3469, 
      "agg2": { 
      "doc_count_error_upper_bound": 1, 
      "sum_other_doc_count": 3459, 
      "buckets": [ 
       { 
       "key": "10367.xxx", 
       "doc_count": 1 
       }, 
       { 
       "key": "10997.xxx", 
       "doc_count": 1 
       }, 
       { 
       "key": "12055.xxx", 
       "doc_count": 1 
       }, 
       { 
       "key": "12157.xxx", 
       "doc_count": 1 
       }, 
       { 
       "key": "12435.xxx", 
       "doc_count": 1 
       }, 
       { 
       "key": "12volt.xxx", 
       "doc_count": 1 
       }, 
       { 
       "key": "13158.xxx", 
       "doc_count": 1 
       }, 
       { 
       "key": "13507.xxx", 
       "doc_count": 1 
       }, 
       { 
       "key": "13597.xxx", 
       "doc_count": 1 
       }, 
       { 
       "key": "137.xxx", 
       "doc_count": 1 
       } 
      ] 
      } 
     }, 
     { 
      "key": "value2", 
      "doc_count": 608, 
      "agg2": { 
      "doc_count_error_upper_bound": 0, 
      "sum_other_doc_count": 577, 
      "buckets": [ 
       { 
       "key": "saasf.xxx", 
       "doc_count": 7 
       }, 
       { 
       "key": "asfasf.xxx", 
       "doc_count": 5 
       }, 
       { 
       "key": "sasfsd.xxx", 
       "doc_count": 3 
       }, 
       { 
       "key": "werwer.xxx", 
       "doc_count": 3 
       }, 
       { 
       "key": "werwre.xxx", 
       "doc_count": 3 
       }, 
       { 
       "key": "a-werwr.xxx", 
       "doc_count": 2 
       }, 
       { 
       "key": "aef.xxx", 
       "doc_count": 2 
       }, 
       { 
       "key": "sadhdhh.xxx", 
       "doc_count": 2 
       }, 
       { 
       "key": "dhsdfsdg.xxx", 
       "doc_count": 2 
       }, 
       { 
       "key": "ertetrt.xxx", 
       "doc_count": 2 
       } 
      ] 
      } 
     }, 
     { 
      "key": "value1", 
      "doc_count": 358, 
      "agg2": { 
      "doc_count_error_upper_bound": 0, 
      "sum_other_doc_count": 336, 
      "buckets": [ 
       { 
       "key": "fhshfg.xxx", 
       "doc_count": 3 
       }, 
       { 
       "key": "sgh.xxx", 
       "doc_count": 3 
       }, 
       { 
       "key": "12.xxx", 
       "doc_count": 2 
       }, 
       { 
       "key": "sbgs.xxx", 
       "doc_count": 2 
       }, 
       { 
       "key": "dp-eca.xxx", 
       "doc_count": 2 
       }, 
       { 
       "key": "ztuhfb.xxx", 
       "doc_count": 2 
       }, 
       { 
       "key": "javascript.xxx", 
       "doc_count": 2 
       }, 
       { 
       "key": "koi-fdhfh.xxx", 
       "doc_count": 2 
       }, 
       { 
       "key": "sdfh.xxx", 
       "doc_count": 2 
       }, 
       { 
       "key": "etz5.xxx", 
       "doc_count": 2 
       } 
      ] 
      } 
     } 
     ] 
    } 
    } 
] 

However, I need a CSV that gives me the following output:

2017-09-01T00:00:00.000+02:00,value3,10367.xxx,1 
2017-09-01T00:00:00.000+02:00,value3,10997.xxx,1 
... 
2017-09-01T00:00:00.000+02:00,value2,saasf.xxx,7 
2017-09-01T00:00:00.000+02:00,value2,asfasf.xxx,5 
... 
2017-09-01T00:00:00.000+02:00,value1,fhshfg.xxx,3 
2017-09-01T00:00:00.000+02:00,value1,sgh.xxx,3 
.. 

Answer


jq solution (for the input shown):

jq -r '.[] | .key_as_string as $ks | .agg1.buckets[] | .key as $key 
      | .agg2.buckets[] | [$ks,$key,.key,.doc_count] | @csv' jsonfile 
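
This walks each element of the top-level array, binds the timestamp to $ks and the agg1 bucket key to $key, then iterates over the nested agg2 buckets and builds one row per leaf bucket; @csv formats each array as a CSV line and -r emits it as raw text.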

Output:

"2017-09-01T00:00:00.000+02:00","value3","10367.xxx",1 
"2017-09-01T00:00:00.000+02:00","value3","10997.xxx",1 
"2017-09-01T00:00:00.000+02:00","value3","12055.xxx",1 
"2017-09-01T00:00:00.000+02:00","value3","12157.xxx",1 
"2017-09-01T00:00:00.000+02:00","value3","12435.xxx",1 
"2017-09-01T00:00:00.000+02:00","value3","12volt.xxx",1 
"2017-09-01T00:00:00.000+02:00","value3","13158.xxx",1 
"2017-09-01T00:00:00.000+02:00","value3","13507.xxx",1 
"2017-09-01T00:00:00.000+02:00","value3","13597.xxx",1 
"2017-09-01T00:00:00.000+02:00","value3","137.xxx",1 
"2017-09-01T00:00:00.000+02:00","value2","saasf.xxx",7 
"2017-09-01T00:00:00.000+02:00","value2","asfasf.xxx",5 
"2017-09-01T00:00:00.000+02:00","value2","sasfsd.xxx",3 
"2017-09-01T00:00:00.000+02:00","value2","werwer.xxx",3 
"2017-09-01T00:00:00.000+02:00","value2","werwre.xxx",3 
"2017-09-01T00:00:00.000+02:00","value2","a-werwr.xxx",2 
"2017-09-01T00:00:00.000+02:00","value2","aef.xxx",2 
"2017-09-01T00:00:00.000+02:00","value2","sadhdhh.xxx",2 
"2017-09-01T00:00:00.000+02:00","value2","dhsdfsdg.xxx",2 
"2017-09-01T00:00:00.000+02:00","value2","ertetrt.xxx",2 
"2017-09-01T00:00:00.000+02:00","value1","fhshfg.xxx",3 
"2017-09-01T00:00:00.000+02:00","value1","sgh.xxx",3 
"2017-09-01T00:00:00.000+02:00","value1","12.xxx",2 
"2017-09-01T00:00:00.000+02:00","value1","sbgs.xxx",2 
"2017-09-01T00:00:00.000+02:00","value1","dp-eca.xxx",2 
"2017-09-01T00:00:00.000+02:00","value1","ztuhfb.xxx",2 
"2017-09-01T00:00:00.000+02:00","value1","javascript.xxx",2 
"2017-09-01T00:00:00.000+02:00","value1","koi-fdhfh.xxx",2 
"2017-09-01T00:00:00.000+02:00","value1","sdfh.xxx",2 
"2017-09-01T00:00:00.000+02:00","value1","etz5.xxx",2 

Thank you! :-) –