{ "cells": [ { "cell_type": "code", "execution_count": 13, "metadata": {}, "outputs": [], "source": [ "# Utilities to read/write csv files\n", "import csv\n", "# Utilities to handle character encodings\n", "import unicodedata\n", "# Ordered Dicts\n", "from collections import OrderedDict\n", "\n", "import json\n", "import re\n", "\n", "\n", "# OPZIONAL IMPORTS\n", "\n", "# For timestamping/simple speed tests\n", "from datetime import datetime\n", "# Random number generator\n", "from random import *\n", "# System & command line utilities\n", "import sys\n", "# Json for the dictionary\n", "import json" ] }, { "cell_type": "code", "execution_count": 14, "metadata": {}, "outputs": [], "source": [ "import_dir = '/Users/federicaspinelli/Google Drive/OVI-CNR/CSV/ASPO/gettatelli/'\n", "export_dir = '/Users/federicaspinelli/Google Drive/OVI-CNR/RDF/ASPO/gettatelli/'" ] }, { "cell_type": "code", "execution_count": 15, "metadata": {}, "outputs": [], "source": [ "# Custom class to store URIs + related infos for the ontologies/repositories\n", "\n", "class RDFcoords:\n", " def __init__(self, uri, prefix, code = None):\n", " self.uri = uri\n", " self.prefix = prefix\n", " self.code = code\n", "\n", "# Repositories\n", "datiniCoords = RDFcoords('', 'dt:')\n", "personAuthCoords = RDFcoords('', 'pa:')\n", "# W3/CIDOC Predicates\n", "hasTypeCoords = RDFcoords('', 'tp:')\n", "hasNoteCoords = RDFcoords('', 'no:')\n", "hasTypePCoords = RDFcoords('', 'te:')\n", "carriesCoords = RDFcoords('', 'ca:')\n", "identifiedByCoords = RDFcoords('', 'ib:')\n", "labelCoords = RDFcoords('', 'lb:')\n", "wasBroughtCoords = RDFcoords('', 'wb:')\n", "wasBornCoords = RDFcoords('', 'wbc:')\n", "diedCoords = RDFcoords('', 'di:')\n", "wasPresentCoords = RDFcoords('', 'wp:')\n", "carriedByCoords = RDFcoords('', 'cb:')\n", "noteCoords = RDFcoords('', 'nt:')\n", "hasTimeSpanCoords = RDFcoords('', 'hs:')\n", "consistCoords = RDFcoords('', 'cf:')\n", "hasConditionCoords = RDFcoords('', 'hc:')\n", "foafCoords = RDFcoords('', 'foaf:')\n", "nsCoords = RDFcoords('', 'rdf:')\n", "rdfsCoords = RDFcoords('', 'rdfs:')\n", "aspoCoords = RDFcoords('', 'aspo:')\n", "\n", "# CIDOC Objects\n", "manMadeObjectCoords = RDFcoords('', 'mo:', 'E22')\n", "informationObjectCoords = RDFcoords('', 'io:', 'E73')\n", "titleCoords = RDFcoords('', 'ti:' ,'E35')\n", "placeAppellationCoords = RDFcoords('', 'pa:', 'E44')\n", "identifierCoords = RDFcoords('', 'id:', 'E42')\n", "typeCoords = RDFcoords('', 'ty:', 'E55')\n", "creationCoords = RDFcoords('', 'cr:', 'E65')\n", "personCoords = RDFcoords('', 'ps:', 'E21')\n", "stringCoords = RDFcoords('', 'st:', 'E62')\n", "birthCoords = RDFcoords('', 'th:', 'E67')\n", "deathCoords = RDFcoords('', 'dh:', 'E69')\n", "timeSpanCoords = RDFcoords('', 'ts:', 'E52')\n", "materialCoords = RDFcoords('', 'mt:', 'E57')\n", "conditionCoords = RDFcoords('', 'cs:', 'E3')\n", "entityCoords = RDFcoords('', 'ey:', 'E1')\n", "eventCoords = RDFcoords('', 'eve:', 'E5')\n", "refersCoords = RDFcoords('', 'rt:')\n", "refersHasTypeCoords = RDFcoords('', 'rh:')\n", "schemaCoords = RDFcoords('', 'schema:')\n", "owlCoords = RDFcoords('', 'owl:')\n", "fallsCoords = RDFcoords('', 'fw:', 'E89')\n", "residenceCoords = RDFcoords('', 'res:')\n", "placeCoords = RDFcoords('', 'pl:', 'E53')\n" ] }, { "cell_type": "code", "execution_count": 16, "metadata": {}, "outputs": [], "source": [ "# Basic functions for triples / shortened triples in TTL format\n", "\n", "def triple(subject, predicate, object1):\n", " line = subject + ' ' + predicate + ' ' + object1\n", " 
  {
   "cell_type": "code",
   "execution_count": 16,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Basic functions for triples / shortened triples in TTL format\n",
    "\n",
    "def triple(subject, predicate, object1):\n",
    "    line = subject + ' ' + predicate + ' ' + object1\n",
    "    return line\n",
    "\n",
    "def doublet(predicate, object1):\n",
    "    line = ' ' + predicate + ' ' + object1\n",
    "    return line\n",
    "\n",
    "def singlet(object1):\n",
    "    line = ' ' + object1\n",
    "    return line\n",
    "\n",
    "# Line endings in TTL format\n",
    "continueLine1 = ' ;\\n'\n",
    "continueLine2 = ' ,\\n'\n",
    "closeLine = ' .\\n'"
   ]
  },
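  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "Quick illustrative example of the helpers above: `triple()` simply concatenates subject, predicate and object, and `closeLine` (or `continueLine1`/`continueLine2`) terminates the statement. The subject and the label literal below are invented placeholders, not values taken from the ASPO dataset."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Illustrative example only: compose a single TTL statement with the helpers above.\n",
    "# 'aspo:person_001' and the label literal are made-up placeholders.\n",
    "example_line = triple('aspo:person_001', labelCoords.prefix, '\\\"Example label\\\"') + closeLine\n",
    "print(example_line)  # -> aspo:person_001 lb: \"Example label\" ."
   ]
  },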
  {
   "cell_type": "code",
   "execution_count": 17,
   "metadata": {},
   "outputs": [],
   "source": [
    "def writeTTLHeader(output):\n",
    "    output.write('@prefix ' + datiniCoords.prefix + ' ' + datiniCoords.uri + closeLine)\n",
    "    output.write('@prefix ' + personAuthCoords.prefix + ' ' + personAuthCoords.uri + closeLine)\n",
    "    output.write('@prefix ' + hasTypeCoords.prefix + ' ' + hasTypeCoords.uri + closeLine)\n",
    "    output.write('@prefix ' + hasTypePCoords.prefix + ' ' + hasTypePCoords.uri + closeLine)\n",
    "    output.write('@prefix ' + manMadeObjectCoords.prefix + ' ' + manMadeObjectCoords.uri + closeLine)\n",
    "    output.write('@prefix ' + carriesCoords.prefix + ' ' + carriesCoords.uri + closeLine)\n",
    "    output.write('@prefix ' + informationObjectCoords.prefix + ' ' + informationObjectCoords.uri + closeLine)\n",
    "    output.write('@prefix ' + identifiedByCoords.prefix + ' ' + identifiedByCoords.uri + closeLine)\n",
    "    output.write('@prefix ' + titleCoords.prefix + ' ' + titleCoords.uri + closeLine)\n",
    "    output.write('@prefix ' + labelCoords.prefix + ' ' + labelCoords.uri + closeLine)\n",
    "    output.write('@prefix ' + identifierCoords.prefix + ' ' + identifierCoords.uri + closeLine)\n",
    "    output.write('@prefix ' + wasBroughtCoords.prefix + ' ' + wasBroughtCoords.uri + closeLine)\n",
    "    output.write('@prefix ' + typeCoords.prefix + ' ' + typeCoords.uri + closeLine)\n",
    "    output.write('@prefix ' + carriedByCoords.prefix + ' ' + carriedByCoords.uri + closeLine)\n",
    "    output.write('@prefix ' + personCoords.prefix + ' ' + personCoords.uri + closeLine)\n",
    "    output.write('@prefix ' + stringCoords.prefix + ' ' + stringCoords.uri + closeLine)\n",
    "    output.write('@prefix ' + noteCoords.prefix + ' ' + noteCoords.uri + closeLine)\n",
    "    output.write('@prefix ' + wasBornCoords.prefix + ' ' + wasBornCoords.uri + closeLine)\n",
    "    output.write('@prefix ' + diedCoords.prefix + ' ' + diedCoords.uri + closeLine)\n",
    "    output.write('@prefix ' + wasPresentCoords.prefix + ' ' + wasPresentCoords.uri + closeLine)\n",
    "    output.write('@prefix ' + birthCoords.prefix + ' ' + birthCoords.uri + closeLine)\n",
    "    output.write('@prefix ' + deathCoords.prefix + ' ' + deathCoords.uri + closeLine)\n",
    "    output.write('@prefix ' + hasTimeSpanCoords.prefix + ' ' + hasTimeSpanCoords.uri + closeLine)\n",
    "    output.write('@prefix ' + timeSpanCoords.prefix + ' ' + timeSpanCoords.uri + closeLine)\n",
    "    output.write('@prefix ' + materialCoords.prefix + ' ' + materialCoords.uri + closeLine)\n",
    "    output.write('@prefix ' + consistCoords.prefix + ' ' + consistCoords.uri + closeLine)\n",
    "    output.write('@prefix ' + conditionCoords.prefix + ' ' + conditionCoords.uri + closeLine)\n",
    "    output.write('@prefix ' + hasConditionCoords.prefix + ' ' + hasConditionCoords.uri + closeLine)\n",
    "    output.write('@prefix ' + refersCoords.prefix + ' ' + refersCoords.uri + closeLine)\n",
    "    output.write('@prefix ' + entityCoords.prefix + ' ' + entityCoords.uri + closeLine)\n",
    "    output.write('@prefix ' + refersHasTypeCoords.prefix + ' ' + refersHasTypeCoords.uri + closeLine)\n",
    "    output.write('@prefix ' + hasNoteCoords.prefix + ' ' + hasNoteCoords.uri + closeLine)\n",
    "    output.write('@prefix ' + creationCoords.prefix + ' ' + creationCoords.uri + closeLine)\n",
    "    output.write('@prefix ' + foafCoords.prefix + ' ' + foafCoords.uri + closeLine)\n",
    "    output.write('@prefix ' + nsCoords.prefix + ' ' + nsCoords.uri + closeLine)\n",
    "    output.write('@prefix ' + eventCoords.prefix + ' ' + eventCoords.uri + closeLine)\n",
    "    output.write('@prefix ' + schemaCoords.prefix + ' ' + schemaCoords.uri + closeLine)\n",
    "    output.write('@prefix ' + rdfsCoords.prefix + ' ' + rdfsCoords.uri + closeLine)\n",
    "    output.write('@prefix ' + owlCoords.prefix + ' ' + owlCoords.uri + closeLine)\n",
    "    output.write('@prefix ' + aspoCoords.prefix + ' ' + aspoCoords.uri + closeLine)\n",
    "    output.write('@prefix ' + fallsCoords.prefix + ' ' + fallsCoords.uri + closeLine)\n",
    "    output.write('@prefix ' + residenceCoords.prefix + ' ' + residenceCoords.uri + closeLine)\n",
    "    output.write('@prefix ' + placeCoords.prefix + ' ' + placeCoords.uri + closeLine)\n",
    "\n",
    "    output.write('\\n')\n"
   ]
  },
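  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "A small added sketch to preview what `writeTTLHeader` produces, using an in-memory buffer instead of a file on disk. Since the `RDFcoords` URI fields are left empty in this notebook, each `@prefix` line shows only the prefix label before the closing `.`."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Illustrative sketch only: capture the TTL prefix header in a StringIO buffer.\n",
    "import io\n",
    "\n",
    "buffer = io.StringIO()\n",
    "writeTTLHeader(buffer)\n",
    "# Preview the first few header lines\n",
    "print(buffer.getvalue()[:120])"
   ]
  },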
  {
   "cell_type": "code",
   "execution_count": 18,
   "metadata": {},
   "outputs": [],
   "source": [
    "filePrefix = 'data_'\n",
    "fileType = 'item'\n",
    "max_entries = 1000000000\n",
    "\n",
    "with open(import_dir + filePrefix + fileType + '_persone_newdataset.csv', newline=\"\") as csv_file, open(export_dir + filePrefix + fileType + '_persone_newdataset.ttl', 'w') as output:\n",
    "    reader = csv.DictReader(csv_file)\n",
    "    writeTTLHeader(output)\n",
    "    first = True\n",
    "    ii = 0\n",
    "    for row in reader:\n",
    "        # The index ii is used to process a limited number of entries for testing purposes\n",
    "        ii = ii + 1\n",
    "        if (row['EAD_EAC'] == 'FALSE'):\n",
    "            e21placeHolder = ''\n",
    "            #e22placeHolder = \"\"\n",
    "            e65placeHolder = \"\"\n",
    "\n",
    "            #line = triple(e22placeHolder, refersCoords.prefix, e21placeHolder) + closeLine\n",
    "            #output.write(line)\n",
    "            if (row['gettatello'] == 'TRUE'):\n",
    "                e55placeHolder = ''\n",
    "                line = triple(e21placeHolder, hasTypePCoords.prefix, e55placeHolder) + closeLine\n",
    "                output.write(line)\n",
    "                line = triple(e55placeHolder, labelCoords.prefix, '\\\"Gettatello\\\"') + closeLine\n",
    "                output.write(line)\n",
    "\n",
    "            # nome rilevato (recorded name)\n",
    "            if(row['nome rilevato'] != ''):\n",
    "                line = triple(e21placeHolder, hasTypeCoords.prefix, personCoords.prefix) + closeLine\n",
    "                output.write(line)\n",
    "                line = triple(e21placeHolder, foafCoords.prefix + 'name', '\\\"' + row['nome rilevato'] + '\\\"') + closeLine\n",
    "                output.write(line)\n",
    "                line = triple(e21placeHolder, labelCoords.prefix, '\\\"' + row['nome rilevato'] + '\\\"') + closeLine\n",
    "                output.write(line)\n",
    "                e62placeHolder = ''\n",
    "                line = triple(e21placeHolder, noteCoords.prefix, e62placeHolder) + closeLine\n",
    "                output.write(line)\n",
    "                line = triple(e62placeHolder, hasTypeCoords.prefix, stringCoords.prefix) + closeLine\n",
    "                output.write(line)\n",
    "                line = triple(e62placeHolder, labelCoords.prefix, '\\\"Fonte: Archivio di Stato di Prato - Fondo Ospedale della Misericordia e Dolce\\\"') + closeLine\n",
    "                output.write(line)\n",
    "\n",
    "            # codice gettatello (foundling registration number)\n",
    "            e42placeHolder = ''\n",
    "            if(row['gettatello'] == 'TRUE' and row['codice gettatello numero'] != ''):\n",
    "                line = triple(e21placeHolder, identifiedByCoords.prefix, e42placeHolder) + closeLine\n",
    "                output.write(line)\n",
    "                line = triple(e42placeHolder, hasTypeCoords.prefix, identifierCoords.prefix) + closeLine\n",
    "                output.write(line)\n",
    "                line = triple(e42placeHolder, labelCoords.prefix, '\\\"Matricola: ' + row['codice gettatello numero'] + '\\\"') + closeLine\n",
    "                output.write(line)\n",
    "\n",
    "            # soprannome (nickname)\n",
    "            if row['soprannome'] != '' and row['soprannome'] != ' ':\n",
    "                # Normalize whitespace (newlines, double spaces)\n",
    "                txt = row['soprannome']\n",
    "                x = re.sub(\"\\n\", \" \", txt)\n",
    "                y = re.sub(r\"\\s\\s\", \"\", x)\n",
    "                line = triple(e21placeHolder, schemaCoords.prefix + 'alternateName', '\\\"' + y + '\\\"') + closeLine\n",
    "                output.write(line)\n",
    "\n",
    "            # nome proprio (given name)\n",
    "            if row['nome proprio'] != '':\n",
    "                # Normalize whitespace (newlines, double spaces)\n",
    "                txt = row['nome proprio']\n",
    "                x = re.sub(\" \\n\", \"\", txt)\n",
    "                y = re.sub(r\"\\s\\s\", \"\", x)\n",
    "                name = re.sub(\"\\n\", \"\", y)\n",
    "                line = triple(e21placeHolder, foafCoords.prefix + 'givenName', '\\\"' + name + '\\\"') + closeLine\n",
    "                output.write(line)\n",
    "\n",
    "            # cognome (family name)\n",
    "            if row['cognome'] != '':\n",
    "                # Normalize whitespace (newlines, double spaces)\n",
    "                txt = row['cognome']\n",
    "                x = re.sub(\"\\n\", \" \", txt)\n",
    "                y = re.sub(r\"\\s\\s\", \"\", x)\n",
    "                line = triple(e21placeHolder, foafCoords.prefix + 'familyName', '\\\"' + y + '\\\"') + closeLine\n",
    "                output.write(line)\n",
    "\n",
    "            # nome_alternativo (alternative name)\n",
    "            if row['nome_alternativo'] != '':\n",
    "                line = triple(e21placeHolder, owlCoords.prefix + 'sameAs', aspoCoords.prefix + row['nome_alternativo'].replace(' ', '_')) + closeLine\n",
    "                output.write(line)\n",
    "                line = triple(aspoCoords.prefix + row['nome_alternativo'].replace(' ', '_'), rdfsCoords.prefix + 'label', '\\\"' + row['nome_alternativo'] + '\\\"') + closeLine\n",
    "                output.write(line)\n",
    "\n",
    "            # genere (gender)\n",
    "            if row['m/f'] != '':\n",
    "                # Normalize whitespace (newlines, double spaces)\n",
    "                txt = row['m/f']\n",
    "                x = re.sub(\"\\n\", \" \", txt)\n",
    "                y = re.sub(r\"\\s\\s\", \"\", x)\n",
    "                line = triple(e21placeHolder, foafCoords.prefix + 'gender', '\\\"' + y + '\\\"') + closeLine\n",
    "                output.write(line)\n",
    "\n",
    "            # patronimico (patronymic)\n",
    "            if row['patronimico'] != '':\n",
    "                # Normalize whitespace (newlines, double spaces)\n",
    "                txt = row['patronimico']\n",
    "                x = re.sub(\"\\n\", \" \", txt)\n",
    "                y = re.sub(r\"\\s\\s\", \"\", x)\n",
    "                line = triple(e21placeHolder, personCoords.prefix + 'patronymicName', '\\\"' + y + '\\\"') + closeLine\n",
    "                output.write(line)\n",
    "\n",
    "            # professione (occupation)\n",
    "            if row['professione'] != '' and row['professione'] != ' ':\n",
    "                occupationPlaceHolder = ''\n",
    "                line = triple(e21placeHolder, schemaCoords.prefix + 'hasOccupation', occupationPlaceHolder) + closeLine\n",
    "                output.write(line)\n",
    "                line = triple(occupationPlaceHolder, nsCoords.prefix + 'type', schemaCoords.prefix + 'Occupation') + closeLine\n",
    "                output.write(line)\n",
    "                line = triple(occupationPlaceHolder, rdfsCoords.prefix + 'label', '\\\"' + row['professione'] + '\\\"') + closeLine\n",
    "                output.write(line)\n",
    "\n",
    "            # avo 1 (ancestor 1)\n",
    "            if row['avo 1'] != '':\n",
    "                avo1 = ''\n",
    "                line = triple(e21placeHolder, schemaCoords.prefix + 'relatedTo', avo1) + closeLine\n",
    "                output.write(line)\n",
    "                line = triple(avo1, nsCoords.prefix + 'type', foafCoords.prefix + 'Person') + closeLine\n",
    "                output.write(line)\n",
    "                line = triple(avo1, rdfsCoords.prefix + 'label', '\\\"' + row['avo 1'] + '\\\"') + closeLine\n",
    "                output.write(line)\n",
    "\n",
    "            # avo 2 (ancestor 2)\n",
    "            if row['avo 2'] != '':\n",
    "                avo2 = ''\n",
    "                line = triple(e21placeHolder, schemaCoords.prefix + 'relatedTo', avo2) + closeLine\n",
    "                output.write(line)\n",
    "                line = triple(avo2, nsCoords.prefix + 'type', foafCoords.prefix + 'Person') + closeLine\n",
    "                output.write(line)\n",
    "                line = triple(avo2, rdfsCoords.prefix + 'label', '\\\"' + row['avo 2'] + '\\\"') + closeLine\n",
    "                output.write(line)\n",
    "\n",
    "            # titolo (honorific title)\n",
    "            if row['titolo'] != '':\n",
    "                txt = row['titolo']\n",
    "                x = re.sub(\"\\n\", \" \", txt)\n",
    "                y = re.sub(r\"\\s\\s\", \" \", x)\n",
    "                line = triple(e21placeHolder, schemaCoords.prefix + 'honorificPrefix', '\\\"' + y + '\\\"') + closeLine\n",
    "                output.write(line)\n",
    "            if row['titolo 2'] != '':\n",
    "                txt = row['titolo 2']\n",
    "                x = re.sub(\"\\n\", \" \", txt)\n",
    "                y = re.sub(r\"\\s\\s\", \" \", x)\n",
    "                line = triple(e21placeHolder, schemaCoords.prefix + 'honorificPrefix', '\\\"' + y + '\\\"') + closeLine\n",
    "                output.write(line)\n",
    "\n",
    "            # provenienza microtoponimo (place of origin, micro-toponym)\n",
    "            if(row['PROVENIENZA_microtoponimo'] != ''):\n",
    "                if(row['IDASPO_GEO_PROVENIENZA_microtoponimo'] != ''):\n",
    "                    e53placeHolder = \"\"\n",
    "                    line = triple(e21placeHolder, residenceCoords.prefix, e53placeHolder) + closeLine\n",
    "                    output.write(line)\n",
    "                else:\n",
    "                    e53placeHolder = ''\n",
    "                    line = triple(e53placeHolder, labelCoords.prefix, '\\\"' + row['PROVENIENZA_microtoponimo'] + '\\\"') + closeLine\n",
    "                    output.write(line)\n",
    "                    line = triple(e53placeHolder, hasTypeCoords.prefix, placeCoords.prefix) + closeLine\n",
    "                    output.write(line)\n",
    "                    line = triple(e21placeHolder, residenceCoords.prefix, e53placeHolder) + closeLine\n",
    "                    output.write(line)\n",
    "            # provenienza macrotoponimo (place of origin, macro-toponym)\n",
    "            if(row['PROVENIENZA_macrotoponimo'] != '' and row['PROVENIENZA_microtoponimo'] != ''):\n",
    "                if(row['IDASPO_GEO_PROVENIENZA_macrotoponimo'] != ''):\n",
    "                    e53mplaceHolder = \"\"\n",
    "                    line = triple(e53placeHolder, fallsCoords.prefix, e53mplaceHolder) + closeLine\n",
    "                    output.write(line)\n",
    "                else:\n",
    "                    e53mplaceHolder = ''\n",
    "                    line = triple(e53mplaceHolder, labelCoords.prefix, '\\\"' + row['PROVENIENZA_macrotoponimo'] + '\\\"') + closeLine\n",
    "                    output.write(line)\n",
    "                    line = triple(e53mplaceHolder, hasTypeCoords.prefix, placeCoords.prefix) + closeLine\n",
    "                    output.write(line)\n",
    "                    line = triple(e53placeHolder, fallsCoords.prefix, e53mplaceHolder) + closeLine\n",
    "                    output.write(line)\n",
    "            elif (row['PROVENIENZA_macrotoponimo'] != '' and row['PROVENIENZA_microtoponimo'] == ''):\n",
    "                if(row['IDASPO_GEO_PROVENIENZA_macrotoponimo'] != ''):\n",
    "                    e53placeHolder = \"\"\n",
    "                    line = triple(e21placeHolder, residenceCoords.prefix, e53placeHolder) + closeLine\n",
    "                    output.write(line)\n",
    "                else:\n",
    "                    e53placeHolder = ''\n",
    "                    line = triple(e53placeHolder, labelCoords.prefix, '\\\"' + row['PROVENIENZA_macrotoponimo'] + '\\\"') + closeLine\n",
    "                    output.write(line)\n",
    "                    line = triple(e53placeHolder, hasTypeCoords.prefix, placeCoords.prefix) + closeLine\n",
    "                    output.write(line)\n",
    "                    line = triple(e21placeHolder, residenceCoords.prefix, e53placeHolder) + closeLine\n",
    "                    output.write(line)\n",
    "\n",
    "            # note (notes)\n",
    "            if(row['note'] != ''):\n",
    "                e62placeHolder = ''\n",
    "                line = triple(e21placeHolder, noteCoords.prefix, e62placeHolder) + closeLine\n",
    "                output.write(line)\n",
    "                line = triple(e62placeHolder, hasTypeCoords.prefix, stringCoords.prefix) + closeLine\n",
    "                output.write(line)\n",
    "                line = triple(e62placeHolder, labelCoords.prefix, '\\\"' + row['note'] + '\\\"') + closeLine\n",
    "                output.write(line)\n",
    "\n",
    "            output.write('\\n')\n",
    "        #\n",
    "        #\n",
    "        # Limit number of entries processed (if desired)\n",
    "        if(ii > max_entries):\n",
    "            break\n"
   ]
  }
 ],
 "metadata": {
  "interpreter": {
   "hash": "31f2aee4e71d21fbe5cf8b01ff0e069b9275f58929596ceb00d14d90e3e16cd6"
  },
  "kernelspec": {
   "display_name": "Python 3.7.3 64-bit",
   "name": "python3"
  },
  "language_info": {
"codemirror_mode": { "name": "ipython", "version": 3 }, "file_extension": ".py", "mimetype": "text/x-python", "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", "version": "3.9.0" }, "metadata": { "interpreter": { "hash": "31f2aee4e71d21fbe5cf8b01ff0e069b9275f58929596ceb00d14d90e3e16cd6" } } }, "nbformat": 4, "nbformat_minor": 2 }