-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathparserToElk.py
More file actions
149 lines (124 loc) · 4.44 KB
/
parserToElk.py
File metadata and controls
149 lines (124 loc) · 4.44 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
#!/usr/bin/python
# -*- coding: utf-8 -*-
import logging
import requests
import json
import csv
import elasticsearch
import getopt
import sys
def addToElasticBulk(payload):
    """Post a newline-delimited bulk payload to the Elasticsearch _bulk endpoint.

    Best-effort: any failure is logged and swallowed, never raised.
    """
    try:
        logging.debug(payload)
        bulk_url = "http://elasticsearch.tz:9200/hackacity-%s/doc/_bulk" % (indexSuffix,)
        resp = requests.post(bulk_url,
                             data=payload,
                             headers={"Content-Type": "application/json"})
        logging.debug(resp.text)
    except Exception as e:
        logging.error("Failed to push event to elastic: " + str(e))
def addToElastic(payload):
    """Index a single JSON document into the hackacity-<suffix> index.

    Best-effort: any failure is logged and swallowed, never raised.
    """
    try:
        logging.debug(payload)
        doc_url = "http://elasticsearch.tz:9200/hackacity-%s/doc" % (indexSuffix,)
        resp = requests.post(doc_url, json=payload)
        logging.debug(resp.text)
    except Exception as e:
        logging.error("Failed to push event to elastic: " + str(e))
def getDocuments(index,query={"query": {"match_all": {}}}):
    """Return all hits matching *query* in *index*, paging with the scroll API.

    Uses the module-level `es` client. The mutable default query is shared
    across calls but never mutated here, so it is safe in practice.
    NOTE(review): assumes an older elasticsearch-py client where
    hits.total is a plain int (newer clients return a dict) — confirm
    against the installed client version.
    NOTE(review): the scroll context is never cleared; it simply expires
    after the 10s keep-alive.
    """
    rs = es.search(index=index, body=query, scroll='10s', preference='_primary_first', size=10000)
    sid=rs['_scroll_id']
    total = rs['hits']['total']
    results = rs['hits']['hits']
    # Keep scrolling until every matching document has been collected.
    while (len(results) < total):
        rs = es.scroll(scroll_id=sid, scroll='10s')
        results += rs['hits']['hits']
        sid=rs['_scroll_id']
    return results
def addInBulkMode(payload,sendLast=False):
    """Buffer *payload* for bulk indexing; flush automatically when full.

    Call with sendLast=True (payload is ignored) to flush whatever remains
    in the buffer at the end of a run.

    Fixes vs. original: the final flush no longer POSTs an empty body
    (Elasticsearch rejects an empty _bulk request), and the buffer is reset
    after the final flush so a later call cannot re-send old documents.
    """
    global batchData, batchCount
    if sendLast:
        if batchData:
            addToElasticBulk(batchData)
            batchData = ""
            batchCount = 0
        return
    # Bulk format: one action line, then the document source line.
    batchData += '{"index": {"_index": "%s", "_type": "doc"}}'%("hackacity-"+indexSuffix) + "\n"
    batchData += json.dumps(payload) + "\n"
    batchCount += 1
    if batchCount >= batchSize:
        addToElasticBulk(batchData)
        batchData = ""
        batchCount = 0
def parseCsvFile_POI(filename):
    """Parse a POI CSV and bulk-index each row as a document.

    Expected columns: [?, lat, lon, value]. The header row is skipped,
    rows whose value is 0 are skipped, and malformed rows are logged and
    skipped. Uses module globals `category` and `datatype`.

    Fixes vs. original: `int(row[3])` was evaluated outside the try block,
    so a malformed value crashed the script; and on a parse failure the
    previous row's payload was still indexed. Rows now build a fresh
    payload and are skipped entirely on failure.
    """
    with open(filename, mode='r') as csv_file:
        csv_reader = csv.reader(csv_file, delimiter=',')
        is_header = True
        for row in csv_reader:
            if is_header:
                # First line is the column-name header.
                is_header = False
                continue
            try:
                value = int(row[3])
                if value == 0:
                    # Zero-count POIs carry no information; skip.
                    continue
                payload = {
                    "category": category,
                    "type": datatype,
                    "value": value,
                    "location": {"lat": row[1], "lon": row[2]},
                }
            except (ValueError, IndexError) as ve:
                logging.error("Failed to parse entry: %s due to %s "%(str(row),str(ve)) )
                continue
            addInBulkMode(payload)
    # Flush whatever remains in the bulk buffer.
    addInBulkMode(None,True)
def parseCsvFile_IOT(filename):
    """Parse an IOT sensor CSV and bulk-index each row as a document.

    Expected columns: [epoch_seconds, lat, lon, value]. The header row is
    skipped and malformed rows are logged and skipped. Uses module globals
    `category` and `datatype`.

    Fix vs. original: on a parse failure the previous row's payload was
    still indexed (addInBulkMode ran unconditionally). Rows now build a
    fresh payload and are skipped entirely on failure.
    """
    with open(filename, mode='r') as csv_file:
        csv_reader = csv.reader(csv_file, delimiter=',')
        is_header = True
        for row in csv_reader:
            if is_header:
                # First line is the column-name header.
                is_header = False
                continue
            try:
                payload = {
                    "category": category,
                    "type": datatype,
                    # Epoch seconds (possibly fractional) -> epoch milliseconds.
                    "date": int(row[0].split(".")[0])*1000,
                    "value": float(row[3]),
                    "location": {"lat": row[1], "lon": row[2]},
                }
            except (ValueError, IndexError) as ve:
                logging.error("Failed to parse entry: %s due to %s "%(str(row),str(ve)) )
                continue
            addInBulkMode(payload)
    # Flush whatever remains in the bulk buffer.
    addInBulkMode(None,True)
if __name__ == '__main__':
    # Entry point: parse -f <csv path> (and an unused -e <int>), derive the
    # target index from the filename, and bulk-load the file into Elasticsearch.
    LOGGING_LEVEL = "DEBUG"
    logging.basicConfig(level=LOGGING_LEVEL, format='%(asctime)s %(levelname)-4s %(message)s')
    es = elasticsearch.Elasticsearch(['http://elasticsearch.tz:9200'], timeout=30)
    # Shared bulk-buffer state consumed by addInBulkMode().
    batchData = ""
    batchCount = 0
    batchSize = 1000
    filename = None
    try:
        opts, args = getopt.getopt(sys.argv[1:], "f:e:")
    except getopt.GetoptError as err:
        # Was py2-only `except getopt.GetoptError, err:`; now matches the
        # `as e` style used in the rest of the file and runs under Python 3.
        logging.error("problem with arguments: " + str(err))
        sys.exit(2)
    for o, a in opts:
        if o == "-f":
            filename = a
        elif o == "-e":
            coiso = int(a)  # NOTE(review): parsed but never used downstream
        else:
            sys.exit(2)
    if filename is None:
        # Previously a missing -f crashed with a NameError on the split below.
        logging.error("missing required -f <filename> argument")
        sys.exit(2)
    # Filenames look like output/<INDEX>_<Category>_<Type>.csv,
    # e.g. output/IOT_AirQuality_CO.csv.
    index, category, datatype = filename.split("/")[1].split(".")[0].split("_")
    indexSuffix = index.lower()
    if index == "POI":
        parseCsvFile_POI(filename)
    else:
        parseCsvFile_IOT(filename)
"""
IOT_AirQuality_CO.csv IOT_AirQuality_NO2.csv IOT_AirQuality_O3.csv IOT_AirQuality_Ox.csv IOT_AirQuality_PM10.csv IOT_AirQuality_PM1.csv IOT_AirQuality_PM25.csv IOT_NoiseLevelObserved_LAeq.csv
"""