CSV_to_RDF_Microtoponimi.py 3.8 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113
  1. # Utilities to read/write csv files
  2. import csv
  3. # Utilities to handle character encodings
  4. import unicodedata
  5. # Ordered Dicts
  6. from collections import OrderedDict
  7. import json
# OPTIONAL IMPORTS
  9. # For timestamping/simple speed tests
  10. from datetime import datetime
  11. # Random number generator
  12. from random import *
  13. # System & command line utilities
  14. import sys
  15. # Json for the dictionary
  16. import json
  17. import_dir = '/Users/leonardocanova/Library/CloudStorage/OneDrive-UniversityofPisa(1)/Documenti/Progetti università/OVI/Programmazione/ASPO/Luoghi/'
  18. export_dir = '/Users/leonardocanova/Library/CloudStorage/OneDrive-UniversityofPisa(1)/Documenti/Progetti università/OVI/Programmazione/ASPO/Luoghi/'
  19. # Custom class to store URIs + related infos for the ontologies/repositories
  20. class RDFcoords:
  21. def __init__(self, uri, prefix, code = None):
  22. self.uri = uri
  23. self.prefix = prefix
  24. self.code = code
  25. # Repositories
  26. cidocCoords = RDFcoords('<http://www.cidoc-crm.org/cidoc-crm/>', 'crm:')
  27. tgnCoords = RDFcoords('<http://vocab.getty.edu/tgn/>', 'tgn:')
  28. nsCoords = RDFcoords('<http://www.w3.org/1999/02/22-rdf-syntax-ns#>', 'rdf:')
  29. schemaCoords = RDFcoords('<http://www.w3.org/2000/01/rdf-schema#>', 'rdfs:')
  30. owlCoords = RDFcoords('<http://www.w3.org/2002/07/owl#>', 'owl:')
  31. devCoords = RDFcoords('<http://dev.restore.ovi.cnr.it/vocabularies/places/>', 'dev:')
  32. # Basic functions for triples / shortened triples in TTL format
  33. def triple(subject, predicate, object1):
  34. line = subject + ' ' + predicate + ' ' + object1
  35. return line
  36. def doublet(predicate, object1):
  37. line = ' ' + predicate + ' ' + object1
  38. return line
  39. def singlet(object1):
  40. line = ' ' + object1
  41. return line
  42. # Line endings in TTL format
  43. continueLine1 = ' ;\n'
  44. continueLine2 = ' ,\n'
  45. closeLine = ' .\n'
  46. def writeTTLHeader(output):
  47. output.write('@prefix ' + cidocCoords.prefix + ' ' + cidocCoords.uri + closeLine)
  48. output.write('@prefix ' + tgnCoords.prefix + ' ' + tgnCoords.uri + closeLine)
  49. output.write('@prefix ' + schemaCoords.prefix + ' ' + schemaCoords.uri + closeLine)
  50. output.write('@prefix ' + nsCoords.prefix + ' ' + nsCoords.uri + closeLine)
  51. output.write('@prefix ' + owlCoords.prefix + ' ' + owlCoords.uri + closeLine)
  52. output.write('@prefix ' + devCoords.prefix + ' ' + devCoords.uri + closeLine)
  53. output.write('\n')
  54. file = "merge_luoghi_ASPO - comuni_microtoponimi_UNIQUE"
  55. max_entries = 1000000000
  56. with open(import_dir + file + '.csv', newline="") as csv_file, open(
  57. export_dir + file + '.ttl', 'w') as output:
  58. reader = csv.DictReader(csv_file)
  59. writeTTLHeader(output)
  60. first = True
  61. ii = 0
  62. for row in reader:
  63. # The index ii is used to process a limited number of entries for testing purposes
  64. ii = ii + 1
  65. #placeHolders
  66. devPlaceHolder_comune = devCoords.prefix + row['ID_RESTORE_comune']
  67. devPlaceHolder_provincia = devCoords.prefix + row['ID_PROVINCIA']
  68. if row['ID_RESTORE_microtoponimo'] != "":
  69. if row['ID_RESTORE_comune'] != "" and " ":
  70. devPlaceHolder_microtoponimo = devCoords.prefix + row['ID_RESTORE_microtoponimo']
  71. line = triple(devPlaceHolder_microtoponimo,
  72. cidocCoords.prefix + 'P89_falls_within',
  73. devPlaceHolder_comune) + closeLine
  74. output.write(line)
  75. else:
  76. if row ['ID_PROVINCIA'] != "" and " ":
  77. line = triple(devPlaceHolder_microtoponimo,
  78. cidocCoords.prefix + 'P89_falls_within',
  79. devPlaceHolder_provincia) + closeLine
  80. output.write(line)
  81. output.write('\n')
  82. #
  83. #
  84. # Limit number of entries processed (if desired)
  85. if (ii > max_entries):
  86. break