# Utilities to read/write CSV files
import csv
# Unicode character data / normalization utilities
import unicodedata
# Ordered dicts
from collections import OrderedDict
from urllib.request import urlopen
from bs4 import BeautifulSoup
# JSON for the dictionary
import json
# OPTIONAL IMPORTS
# For timestamping / simple speed tests
from datetime import datetime
# Random number generator
from random import *
# System & command line utilities
import sys
import_dir = '/Users/federicaspinelli/TEAMOVI/Parser/DATA/MPP/CSV/corretti/'
export_dir = '/Users/federicaspinelli/TEAMOVI/Parser/DATA/MPP/DATE/'
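# NOTE: these are absolute paths on the author's machine; adjust them to your
# local directory layout before running the script.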
# Custom class to store URIs + related info for the ontologies/repositories
class RDFcoords:
    def __init__(self, uri, prefix, code=None):
        self.uri = uri
        self.prefix = prefix
        self.code = code
# Repositories
museoCoords = RDFcoords('<https://palazzopretorio.prato.it/it/le-opere/alcuni-capolavori/>', 'mpp:')
autCoords = RDFcoords('<https://palazzopretorio.prato.it/it/opere/autori/>', 'aut:')
cidocCoords = RDFcoords('<http://www.cidoc-crm.org/cidoc-crm/>', 'crm:')
aatCoords = RDFcoords('<http://vocab.getty.edu/aat/>', 'aat:')
nsCoords = RDFcoords('<http://www.w3.org/1999/02/22-rdf-syntax-ns#>', 'rdf:')
schemaCoords = RDFcoords('<http://www.w3.org/2000/01/rdf-schema#>', 'rdfs:')
xsdCoords = RDFcoords('<http://www.w3.org/2001/XMLSchema#>', 'xsd:')
iconCoords = RDFcoords('<http://iconclass.org/>', 'ico:')
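# Time-related properties: each property gets its own prefix whose IRI is the
# full property IRI, so the bare prefixed name (e.g. 'beg:') can be used
# directly as a predicate in the triples emitted below.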
yearCoords = RDFcoords('<https://www.w3.org/TR/owl-time#year>', 'year:')
monthCoords = RDFcoords('<https://www.w3.org/TR/owl-time#month>', 'month:')
dayCoords = RDFcoords('<https://www.w3.org/TR/owl-time#day>', 'day:')
beginningCoords = RDFcoords('<https://www.w3.org/TR/owl-time#hasBeginning>', 'beg:')
endCoords = RDFcoords('<https://www.w3.org/TR/owl-time#hasEnd>', 'end:')
rdfsCoords = RDFcoords('<http://www.w3.org/2000/01/rdf-schema#>', 'rdfs:')
# Basic functions for triples / shortened triples in TTL format
def triple(subject, predicate, object1):
    line = subject + ' ' + predicate + ' ' + object1
    return line

def doublet(predicate, object1):
    line = ' ' + predicate + ' ' + object1
    return line

def singlet(object1):
    line = ' ' + object1
    return line
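# Example: triple('mpp:1350', 'beg:', '"13500101"^^xsd:date') returns
# 'mpp:1350 beg: "13500101"^^xsd:date' (the TTL line ending is appended separately).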
# Line endings in TTL format
continueLine1 = ' ;\n'
continueLine2 = ' ,\n'
closeLine = ' .\n'
def writeTTLHeader(output):
    # One @prefix declaration per namespace/property prefix used in the export
    for coords in (museoCoords, cidocCoords, aatCoords, schemaCoords, nsCoords,
                   autCoords, xsdCoords, iconCoords, beginningCoords, endCoords,
                   yearCoords, monthCoords, dayCoords, rdfsCoords):
        output.write('@prefix ' + coords.prefix + ' ' + coords.uri + closeLine)

    output.write('\n')
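# writeTTLHeader produces one line per prefix, e.g. for museoCoords:
#   @prefix mpp: <https://palazzopretorio.prato.it/it/le-opere/alcuni-capolavori/> .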
filePrefix = 'AR20AUT_'
fileType = 'DATINI'
max_entries = 1000000000
with open(import_dir + filePrefix + fileType + '.csv', newline='') as csv_file, \
        open(export_dir + filePrefix + fileType + '_normalizzate.ttl', 'w') as output:
    reader = csv.DictReader(csv_file)
    writeTTLHeader(output)
    first = True
    ii = 0
    for row in reader:
        # The index ii is used to process a limited number of entries for testing purposes
        ii = ii + 1
        # if row['RVEL'] == '' or row['RVEL'] == '0':
        # E12 - P4 - E52
        # if row['DTSI'] != '':
        # line = triple(museoCoords.prefix + row['DTSI'], yearCoords.prefix, '\"'+row['DTSI'] +'\"^^xsd:integer')+ closeLine
        # output.write(line)
        # # line = triple(museoCoords.prefix + row['DTSI'], monthCoords.prefix, '\"'+ '01' +'\"^^xsd:integer') + closeLine
        # # output.write(line)
        # # line = triple(museoCoords.prefix + row['DTSI'], dayCoords.prefix, '\"'+ '01' +'\"^^xsd:integer') + closeLine
        # # output.write(line)
        # line = triple(museoCoords.prefix + row['DTSI'], beginningCoords.prefix, '\"'+row['DTSI']+'0101'+'\"^^xsd:date') + closeLine
        # output.write(line)
        # e12FplaceHolder = ''
        # if row['DTSI'] != row['DTSF']:
        # e12FplaceHolder = museoCoords.prefix + row['URL'] + '_E12F'
        # if e12FplaceHolder != '':
        # line = triple(museoCoords.prefix + row['DTSF'], yearCoords.prefix, '\"'+row['DTSF'] +'\"^^xsd:integer') + closeLine
        # output.write(line)
        # # line = triple(museoCoords.prefix + row['DTSF'], monthCoords.prefix, '\"'+ '12' +'\"^^xsd:integer') + closeLine
        # # output.write(line)
        # # line = triple(museoCoords.prefix + row['DTSF'], dayCoords.prefix, '\"'+ '31' +'\"^^xsd:integer') + closeLine
        # # output.write(line)
        # line = triple(museoCoords.prefix + row['DTSF'], endCoords.prefix, '\"'+row['DTSF']+'1231'+'\"^^xsd:date') + closeLine
        # output.write(line)
        # AUTD: strip spaces, '/' and '.' from the value and use the result as the
        # local name of the time-span node; the raw value padded with '1231' is
        # written as the end of the span.
        if row['AUTD'] != '':
            tt = row['AUTD'].replace(' ', '')
            tim = tt.replace('/', '')
            time = tim.replace('.', '')
            # line = triple(museoCoords.prefix + time, yearCoords.prefix, '\"'+row['AUTD'] +'\"^^xsd:integer') + closeLine
            # output.write(line)
            # line = triple(museoCoords.prefix + time, beginningCoords.prefix, '\"'+row['AUTD']+'0101'+'\"^^xsd:date') + closeLine
            # output.write(line)
            line = triple(museoCoords.prefix + time, endCoords.prefix, '\"'+row['AUTD']+'1231'+'\"^^xsd:date') + closeLine
            output.write(line)
        # AUTT: same normalization; the raw value padded with '0101' is written
        # as the beginning of the span.
        if row['AUTT'] != '':
            tt = row['AUTT'].replace(' ', '')
            tim = tt.replace('/', '')
            time = tim.replace('.', '')
            # line = triple(museoCoords.prefix + time, yearCoords.prefix, '\"'+row['AUTT'] +'\"^^xsd:integer') + closeLine
            # output.write(line)
            # line = triple(museoCoords.prefix + time, endCoords.prefix, '\"'+row['AUTT']+'1231'+'\"^^xsd:date') + closeLine
            # output.write(line)
            line = triple(museoCoords.prefix + time, beginningCoords.prefix, '\"'+row['AUTT']+'0101'+'\"^^xsd:date') + closeLine
            output.write(line)
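        # Example of an emitted line, assuming a row with AUTT = '1350':
        #   mpp:1350 beg: "13500101"^^xsd:date .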

        output.write('\n')

        # Limit the number of entries processed (if desired)
        if ii > max_entries:
            break

        # if row['PRDI' + f] != '':
        # timespan = row['PRDI' + f]
        # tt = timespan.replace(' ', '')
        # tp = tt.replace('.', '')
        # ts = tp.replace('/', '')
        # timespanPlaceholder = museoCoords.prefix + '_' + ts
        # # E10 P4 E52
        # line = triple(newe10placeHolder,
        #               cidocCoords.prefix + 'P4_has_time-span',
        #               timespanPlaceholder) + closeLine
        # output.write(line)
        # line = triple(timespanPlaceholder,
        #               nsCoords.prefix + 'type',
        #               cidocCoords.prefix + 'E52_Time-Span') + closeLine
        # output.write(line)
        # line = triple(timespanPlaceholder,
        #               schemaCoords.prefix + 'label',
        #               '\"' + timespan + '\"') + closeLine
        # output.write(line)
        # if row['PRDU' + last] != '':
        # timespan = row['PRDU' + last]
        # tt = timespan.replace(' ', '')
        # ts = tt.replace('/', '')
        # timespanPlaceholder = museoCoords.prefix + '_' + ts
        # # E10 P4 E52
        # line = triple(e10placeHolder,
        #               cidocCoords.prefix + 'P4_has_time-span',
        #               timespanPlaceholder) + closeLine
        # output.write(line)
        # line = triple(timespanPlaceholder,
        #               nsCoords.prefix + 'type',
        #               cidocCoords.prefix + 'E52_Time-Span') + closeLine
        # output.write(line)
        # line = triple(timespanPlaceholder,
        #               schemaCoords.prefix + 'label',
        #               '\"' + timespan + '\"') + closeLine
        # output.write(line)