# CSV_to_RDF_mpp_Move.py

# Utilities to read/write csv files
import csv
# Utilities to handle character encodings
import unicodedata
# Ordered Dicts
from collections import OrderedDict
# Json for the dictionary
import json

# OPTIONAL IMPORTS
# For timestamping / simple speed tests
from datetime import datetime
# Random number generator
from random import *
# System & command line utilities
import sys

import_dir = '/Users/alessiaspadi/Documents/RESTORE/temp_MPP/tabelle/Ospedale/mod/'
export_dir = '/Users/alessiaspadi/Documents/RESTORE/temp_MPP/tabelle/Ospedale/mod/E9_'

# Custom class to store URIs + related infos for the ontologies/repositories
class RDFcoords:
    def __init__(self, uri, prefix, code=None):
        self.uri = uri
        self.prefix = prefix
        self.code = code

# Repositories
museoCoords = RDFcoords('<http://palazzopretorio.comune.prato.it/it/le-opere/alcuni-capolavori/>', 'mpp:')
cidocCoords = RDFcoords('<http://www.cidoc-crm.org/cidoc-crm/>', 'crm:')
aatCoords = RDFcoords('<http://vocab.getty.edu/aat/>', 'aat:')
nsCoords = RDFcoords('<http://www.w3.org/1999/02/22-rdf-syntax-ns#>', 'rdf:')
schemaCoords = RDFcoords('<http://www.w3.org/2000/01/rdf-schema#>', 'rdfs:')

# Basic functions for triples / shortened triples in TTL format
def triple(subject, predicate, object1):
    line = subject + ' ' + predicate + ' ' + object1
    return line


def doublet(predicate, object1):
    line = ' ' + predicate + ' ' + object1
    return line


def singlet(object1):
    line = ' ' + object1
    return line


# Line endings in TTL format
continueLine1 = ' ;\n'
continueLine2 = ' ,\n'
closeLine = ' .\n'
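
# Illustrative sketch of how these helpers compose (the URI 'mpp:opera1' is made up, not from the CSV):
#   triple('mpp:opera1', 'rdf:type', 'crm:E9_Move') + closeLine
#   -> 'mpp:opera1 rdf:type crm:E9_Move .\n'
# doublet()/singlet() produce the shortened forms meant to follow continueLine1 (';') or continueLine2 (',').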
  45. def writeTTLHeader(output):
  46. output.write('@prefix ' + museoCoords.prefix + ' ' + museoCoords.uri + closeLine)
  47. output.write('@prefix ' + cidocCoords.prefix + ' ' + cidocCoords.uri + closeLine)
  48. output.write('@prefix ' + aatCoords.prefix + ' ' + aatCoords.uri + closeLine)
  49. output.write('@prefix ' + schemaCoords.prefix + ' ' + schemaCoords.uri + closeLine)
  50. output.write('@prefix ' + nsCoords.prefix + ' ' + nsCoords.uri + closeLine)
  51. output.write('\n')
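
# The header written by writeTTLHeader() takes the form (one line per declared prefix):
#   @prefix mpp: <http://palazzopretorio.comune.prato.it/it/le-opere/alcuni-capolavori/> .
#   @prefix crm: <http://www.cidoc-crm.org/cidoc-crm/> .
#   ... and so on for aat:, rdfs: and rdf:.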

filePrefix = 'SR20OA_'
fileType = 'Ospedale'
max_entries = 1000000000

with open(import_dir + filePrefix + fileType + '.csv', newline="") as csv_file, open(
        export_dir + filePrefix + fileType + '.ttl', 'w') as output:
    reader = csv.DictReader(csv_file)
    writeTTLHeader(output)
    first = True
    ii = 0
    for row in reader:
        # The index ii is used to process a limited number of entries for testing purposes
        ii = ii + 1
        if row['RVEL'] == '' or row['RVEL'] == '0':
            # Triplify the 'codice' -- should exist for every entry
            codice = ''
            if row['NCTR'] != '' and row['NCTN'] != '':
                codice = row['NCTR'] + row['NCTN']
            url = row['URL']
            # placeHolders
            datplaceHolder = museoCoords.prefix + url
            e53placeHolder = museoCoords.prefix + url + '_E53'
            e9placeHolder = museoCoords.prefix + url + '_E9'
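            # Reading of the code (an assumption, not documented in the original): datplaceHolder
            # identifies the object itself, e53placeHolder its current location (the '_E53' suffix
            # suggests a CIDOC CRM E53 Place) and e9placeHolder the final E9 Move that brought it
            # there; intermediate moves get their own '_E9_<k>' URIs further below.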

            columnName = list(row)
            tcl = []
            for name in columnName:
                if 'TCL' in name:
                    tcl.append(name)

            # Count how many of the TCL* columns are actually filled in for this row
            j = 0
            for el in tcl:
                if row[el] != '':
                    j = j + 1
            last = str(j - 1)
            n = len(tcl) - 1
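
            # Assumption about the CSV layout (inferred from the code, not documented here):
            # repeated location fields come in groups PRCD/PRDI/PRDU/PRCU/PRVC/PRVP/PRVR/PRVS,
            # unsuffixed for the first group and suffixed 1, 2, ... for the later ones.
            # In the loop below, k is the suffix of the location the object moved to and
            # f the suffix of the location it moved from ('' when the source is the first group).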
            for i in range(n - 1):
                k = str(i + 1)
                if i + 1 == 1:
                    w = ''
                else:
                    w = i
                f = str(w)
                if row['TCL' + k] != '':
                    pastLocation = ''
                    newLocation = ''
                    pl = ''
                    if row['PRCD' + k] != '':
                        newLocation = ' a ' + row['PRCD' + k]
                    if row['PRCD' + f] != '':
                        pastLocation = ' da ' + row['PRCD' + f]
                        pl = row['PRCD' + f].replace(' ', '')

                    newe9placeHolder = museoCoords.prefix + url + "_E9_" + k
                    # Object P25i E9 (moved by)
                    line = triple(datplaceHolder,
                                  cidocCoords.prefix + 'P25i_moved_by',
                                  newe9placeHolder) + closeLine
                    output.write(line)
                    line = triple(newe9placeHolder,
                                  nsCoords.prefix + 'type',
                                  cidocCoords.prefix + 'E9_Move') + closeLine
                    output.write(line)
                    # Italian label: "Trasferimento di <object> da <old location> a <new location>"
                    # ("Transfer of ... from ... to ...")
                    line = triple(newe9placeHolder,
                                  schemaCoords.prefix + 'label',
                                  '\"Trasferimento di ' + row['SGTI'] + pastLocation +
                                  newLocation + '\"') + closeLine
                    output.write(line)

                    timespan = ''
                    ts = ''
                    if row['PRDI' + f] != '':
                        timespan = row['PRDI' + f]
                    if row['PRDU' + f] != '':
                        timespan = timespan + ' - ' + row['PRDU' + f]
                    tt = timespan.replace(' ', '')
                    ts = tt.replace('/', '')
                    timespanPlaceholder = museoCoords.prefix + url + '_' + ts
                    pastLocationPlaceholder = museoCoords.prefix + url + '_' + pl
                    newLoc = row['PRCD' + k].replace(' ', '')
                    newLocationPlaceholder = museoCoords.prefix + url + '_' + newLoc
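                    # Sketch of the URIs built above (illustrative values, not from the data):
                    # with url = '123', ts = '1900-1950' and pl = 'Deposito', this yields
                    # mpp:123_1900-1950 for the time-span node and mpp:123_Deposito for the source location node.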

                    # E9 P4 E52
                    line = triple(newe9placeHolder,
                                  cidocCoords.prefix + 'P4_has_time-span',
                                  timespanPlaceholder) + closeLine
                    output.write(line)
                    line = triple(timespanPlaceholder,
                                  nsCoords.prefix + 'type',
                                  cidocCoords.prefix + 'E52_Time-Span') + closeLine
                    output.write(line)
                    line = triple(timespanPlaceholder,
                                  schemaCoords.prefix + 'label',
                                  '\"' + timespan + '\"') + closeLine
                    output.write(line)

                    # E9 P26 E53 (moved to)
                    if newLocationPlaceholder != '':
                        line = triple(newe9placeHolder,
                                      cidocCoords.prefix + 'P26_moved_to',
                                      newLocationPlaceholder) + closeLine
                        output.write(line)

                    # E9 P27 E53 (moved from)
                    pastLocationLabel = row['PRCD' + f]
                    if row['PRCU' + f] != '':
                        pastLocationLabel = pastLocationLabel + ', ' + row['PRCU' + f]
                    if row['PRVC' + f] != '':
                        pastLocationLabel = pastLocationLabel + ', ' + row['PRVC' + f]
                    if row['PRVP' + f] != '':
                        pastLocationLabel = pastLocationLabel + ' (' + row['PRVP' + f] + ')'
                    if row['PRVR' + f] != '':
                        pastLocationLabel = pastLocationLabel + ', ' + row['PRVR' + f]
                    if row['PRVS' + f] != '':
                        pastLocationLabel = pastLocationLabel + ', ' + row['PRVS' + f]

                    line = triple(newe9placeHolder,
                                  cidocCoords.prefix + 'P27_moved_from',
                                  pastLocationPlaceholder) + closeLine
                    output.write(line)
                    line = triple(pastLocationPlaceholder,
                                  nsCoords.prefix + 'type',
                                  cidocCoords.prefix + 'E74_Group') + closeLine
                    output.write(line)
                    line = triple(pastLocationPlaceholder,
                                  schemaCoords.prefix + 'label',
                                  '\"' + pastLocationLabel + '\"') + closeLine
                    output.write(line)
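
            # After the intermediate moves, the unsuffixed e9placeHolder below models the last
            # transfer, from the last filled PRCD group into the current location (LDCN / e53placeHolder).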
            pastLocation = ''
            newLocation = ''
            pl = ''
            if row['LDCN'] != '':
                newLocation = ' a ' + row['LDCN']
            if row['PRCD' + last] != '':
                pastLocation = ' da ' + row['PRCD' + last]
                pl = row['PRCD' + last].replace(' ', '')

            line = triple(datplaceHolder,
                          cidocCoords.prefix + 'P25i_moved_by',
                          e9placeHolder) + closeLine
            output.write(line)
            line = triple(e9placeHolder,
                          nsCoords.prefix + 'type',
                          cidocCoords.prefix + 'E9_Move') + closeLine
            output.write(line)
            line = triple(e9placeHolder,
                          schemaCoords.prefix + 'label',
                          '\"Trasferimento di ' + row['SGTI'] + pastLocation +
                          newLocation + '\"') + closeLine
            output.write(line)

            timespan = ''
            ts = ''
            if row['PRDI' + last] != '':
                timespan = row['PRDI' + last]
            if row['PRDU' + last] != '':
                timespan = timespan + ' - ' + row['PRDU' + last]
            tt = timespan.replace(' ', '')
            ts = tt.replace('/', '')
            timespanPlaceholder = museoCoords.prefix + url + '_' + ts
            pastLocationPlaceholder = museoCoords.prefix + url + '_' + pl
            newLocationPlaceholder = e53placeHolder

            # E9 P4 E52
            line = triple(e9placeHolder,
                          cidocCoords.prefix + 'P4_has_time-span',
                          timespanPlaceholder) + closeLine
            output.write(line)
            line = triple(timespanPlaceholder,
                          nsCoords.prefix + 'type',
                          cidocCoords.prefix + 'E52_Time-Span') + closeLine
            output.write(line)
            line = triple(timespanPlaceholder,
                          schemaCoords.prefix + 'label',
                          '\"' + timespan + '\"') + closeLine
            output.write(line)

            # E9 P26 E53 (moved to)
            if newLocationPlaceholder != '':
                line = triple(e9placeHolder,
                              cidocCoords.prefix + 'P26_moved_to',
                              newLocationPlaceholder) + closeLine
                output.write(line)

            # E9 P27 E53 (moved from)
            pastLocationLabel = row['PRCD' + last] + ', ' + row['PRCU' + last] + ', ' + row['PRVC' + last] \
                                + ' (' + row['PRVP' + last] + '), ' + row['PRVR' + last] + \
                                ', ' + row['PRVS' + last]
            line = triple(e9placeHolder,
                          cidocCoords.prefix + 'P27_moved_from',
                          pastLocationPlaceholder) + closeLine
            output.write(line)
            line = triple(pastLocationPlaceholder,
                          nsCoords.prefix + 'type',
                          cidocCoords.prefix + 'E74_Group') + closeLine
            output.write(line)
            line = triple(pastLocationPlaceholder,
                          schemaCoords.prefix + 'label',
                          '\"' + pastLocationLabel + '\"') + closeLine
            output.write(line)

        # Blank line between records in the TTL output
        output.write('\n')

        # Limit number of entries processed (if desired)
        if ii > max_entries:
            break