-
Notifications
You must be signed in to change notification settings - Fork 0
/
test.py
68 lines (55 loc) · 1.71 KB
/
test.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
# convert csv to json
import csv
import json
import os
def conCSV2JSON(csvFile, jsonFile):
    """Convert a CSV file into a JSON file holding a list of row objects.

    csvFile  -- path to the input CSV; the first row is used as the header.
    jsonFile -- path to the output JSON file (overwritten if it exists).

    All values are written as strings, exactly as csv.DictReader yields them.
    """
    # newline='' is required by the csv module so embedded newlines in
    # quoted fields are handled correctly.
    with open(csvFile, newline='') as f:
        # DictReader already yields one dict per row keyed by the header;
        # no need to copy it key-by-key into a fresh dict.
        data = [dict(row) for row in csv.DictReader(f)]
    # Use a distinct handle name so the jsonFile path argument is not
    # shadowed by the file object.
    with open(jsonFile, 'w') as out:
        json.dump(data, out, indent=4)
def breakUpJson(jsonFile, jsonFile2):
    """Rewrite a JSON-array file as newline-delimited JSON (one object per line).

    jsonFile  -- path to a file containing a single top-level JSON array.
    jsonFile2 -- path to the output NDJSON file (overwritten if it exists).

    Prints the number of items converted (kept from the original behavior).
    """
    with open(jsonFile) as jf:
        listOfJson = json.load(jf)
    print(len(listOfJson))
    # ''.join over a generator is O(n); repeated string += is quadratic.
    ndjson = ''.join(json.dumps(item) + '\n' for item in listOfJson)
    # Distinct handle name avoids shadowing the jsonFile parameter.
    with open(jsonFile2, 'w') as out:
        out.write(ndjson)
# next line after each json file
def viewJson(jsonFile):
    """Pretty-print the JSON document stored at *jsonFile* to stdout."""
    with open(jsonFile) as handle:
        document = json.load(handle)
    print(json.dumps(document, indent=4))
##
# possibly todo lambda async break up
# catch errors
# one line json objs
# log the errors
# fan out to pars
# async with que put all the days for map
##
def main():
    """Entry point: split the sample JSON array into NDJSON, then display it.

    Paths are hard-coded under the user's home directory; the input file
    must already exist there.
    """
    userhome = os.path.expanduser('~')
    # os.path.join handles separators portably instead of string concatenation.
    base = os.path.join(userhome, "Projects", "spark-test", "test-json")
    src = os.path.join(base, "10681_2017_1876_MOESM1_ESM.json")
    dst = os.path.join(base, "t3.json")  # t2.json was defined but never used; dropped
    print(src)
    breakUpJson(src, dst)
    viewJson(dst)
if __name__ == '__main__':
    main()