{ "cells": [
 { "cell_type": "code", "execution_count": 25, "metadata": {}, "outputs": [], "source": [
  "# Utilities to read/write csv files\n",
  "import csv\n",
  "import unicodedata\n",
  "# Ordered Dicts\n",
  "from collections import OrderedDict\n",
  "import json\n",
  "\n",
  "# OPTIONAL IMPORTS\n",
  "\n",
  "# For timestamping/simple speed tests\n",
  "from datetime import datetime\n",
  "# Random number generator\n",
  "from random import *\n",
  "# System & command line utilities\n",
  "import sys\n",
  "import re" ] },
 { "cell_type": "code", "execution_count": 26, "metadata": {}, "outputs": [], "source": [
  "import_dir = '/Users/federicaspinelli/TEAMOVI/Parser/DATA/ASPO/DATE/CORRETTE/OSPEDALE/CSV/'\n",
  "export_dir = '/Users/federicaspinelli/TEAMOVI/Parser/DATA/ASPO/DATE/CORRETTE/OSPEDALE/RDF/'" ] },
 { "cell_type": "code", "execution_count": 27, "metadata": {}, "outputs": [], "source": [
  "# Custom class to store URIs + related info for the ontologies/repositories\n",
  "\n",
  "class RDFcoords:\n",
  "    def __init__(self, uri, prefix, code = None):\n",
  "        self.uri = uri\n",
  "        self.prefix = prefix\n",
  "        self.code = code\n",
  "\n",
  "\n",
  "# Repositories\n",
  "datiniCoords = RDFcoords('', 'dt:')\n",
  "personAuthCoords = RDFcoords('', 'pa:')\n",
  "# W3 Predicates\n",
  "hasTypeCoords = RDFcoords('', 'tp:')\n",
  "labelCoords = RDFcoords('', 'lb:')\n",
  "# CIDOC Predicates\n",
  "identifiedByCoords = RDFcoords('', 'ib:')\n",
  "hasTypePCoords = RDFcoords('', 'te:')\n",
  "hasTimeSpanCoords = RDFcoords('', 'hs:')\n",
  "tookPlaceCoords = RDFcoords('', 'tk:')\n",
  "carriedByCoords = RDFcoords('', 'cb:')\n",
  "wasBroughtCoords = RDFcoords('', 'wb:')\n",
  "hasProducedCoords = RDFcoords('', 'hp:')\n",
  "wasProducedCoords = RDFcoords('', 'wp:')\n",
  "carriesCoords = RDFcoords('', 'ca:')\n",
  "hasAlternativeFormCoords = RDFcoords('', 'af:')\n",
  "onGoingTCoords = RDFcoords('', 'gt:')\n",
  "nsCoords = RDFcoords('', 'rdf:')\n",
  "schemaCoords = RDFcoords('', 'rdfs:')\n",
  "yearCoords = RDFcoords('', 'year:')\n",
  "monthCoords = RDFcoords('', 'month:')\n",
  "dayCoords = RDFcoords('', 'day:')\n",
  "# CIDOC Objects\n",
  "productionCoords = RDFcoords('', 'pr:', 'E12')\n",
  "personCoords = RDFcoords('', 'ps:', 'E21')\n",
  "manMadeObjectCoords = RDFcoords('', 'mo:', 'E22')\n",
  "titleCoords = RDFcoords('', 'ti:', 'E35')\n",
  "identifierCoords = RDFcoords('', 'id:', 'E42')\n",
  "timeSpanCoords = RDFcoords('', 'ts:', 'E52')\n",
  "placeCoords = RDFcoords('', 'pl:', 'E53')\n",
  "typeCoords = RDFcoords('', 'ty:', 'E55')\n",
  "creationCoords = RDFcoords('', 'cr:', 'E65')\n",
  "informationObjectCoords = RDFcoords('', 'io:', 'E73')\n",
  "sendLetterCoords = RDFcoords('', 'sl:', 'EL2')\n",
  "receiveLetterCoords = RDFcoords('', 'rl:', 'EL3')\n",
  "xsdCoords = RDFcoords('', 'xsd:')\n",
  "cidocCoords = RDFcoords('', 'crm:')\n",
  "beginningCoords = RDFcoords('', 'beg:')\n",
  "endCoords = RDFcoords('', 'end:')" ] },
 { "cell_type": "markdown", "metadata": {}, "source": [
  "- year: https://www.w3.org/TR/owl-time#year\n",
  "- month: https://www.w3.org/TR/owl-time#month\n",
  "- day: https://www.w3.org/TR/owl-time#day" ] }
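,
 { "cell_type": "markdown", "metadata": {}, "source": [
  "Each `RDFcoords` object pairs a namespace URI with the prefix used in the generated TTL (and, for CIDOC-CRM entities, the class code). The namespace URIs are left empty in this copy of the notebook; the next cell is a minimal sketch, using a made-up example URI, of how a populated `RDFcoords` turns into an `@prefix` declaration." ] },
 { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [
  "# Illustrative sketch only: the URI below is a placeholder, not the project's actual namespace\n",
  "exampleCoords = RDFcoords('<http://example.org/datini/>', 'dt:')\n",
  "prefix_line = '@prefix ' + exampleCoords.prefix + ' ' + exampleCoords.uri + ' .'\n",
  "print(prefix_line)\n",
  "# -> @prefix dt: <http://example.org/datini/> ." ] }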
,
 { "cell_type": "code", "execution_count": 28, "metadata": {}, "outputs": [], "source": [
  "# Basic functions for triples / shortened triples in TTL format\n",
  "\n",
  "def triple(subject, predicate, object1):\n",
  "    line = subject + ' ' + predicate + ' ' + object1\n",
  "    return line\n",
  "\n",
  "def doublet(predicate, object1):\n",
  "    line = ' ' + predicate + ' ' + object1\n",
  "    return line\n",
  "\n",
  "def singlet(object1):\n",
  "    line = ' ' + object1\n",
  "    return line\n",
  "\n",
  "# Line endings in TTL format\n",
  "continueLine1 = ' ;\\n'\n",
  "continueLine2 = ' ,\\n'\n",
  "closeLine = ' .\\n'" ] },
 { "cell_type": "code", "execution_count": 29, "metadata": {}, "outputs": [], "source": [
  "def writeTTLHeader(output):\n",
  "    # Declare every namespace prefix used in the generated TTL, in a fixed order\n",
  "    prefixCoords = [datiniCoords, personAuthCoords, hasTypeCoords, labelCoords,\n",
  "                    identifiedByCoords, hasTypePCoords, hasTimeSpanCoords, tookPlaceCoords,\n",
  "                    carriedByCoords, wasBroughtCoords, hasProducedCoords, wasProducedCoords,\n",
  "                    carriesCoords, hasAlternativeFormCoords, productionCoords, personCoords,\n",
  "                    manMadeObjectCoords, titleCoords, identifierCoords, timeSpanCoords,\n",
  "                    placeCoords, typeCoords, creationCoords, informationObjectCoords,\n",
  "                    onGoingTCoords, schemaCoords, nsCoords, sendLetterCoords, receiveLetterCoords,\n",
  "                    yearCoords, monthCoords, dayCoords, xsdCoords, cidocCoords,\n",
  "                    beginningCoords, endCoords]\n",
  "    for coords in prefixCoords:\n",
  "        output.write('@prefix ' + coords.prefix + ' ' + coords.uri + closeLine)\n",
  "    output.write('\\n')\n" ] }
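,
 { "cell_type": "markdown", "metadata": {}, "source": [
  "A quick, purely illustrative check of the helpers above: the subject string and the literal below are made up, not taken from the real data, and the namespace URIs are still empty in this copy." ] },
 { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [
  "# Illustrative sketch only: 'ex:doc_001' and the year literal are invented values\n",
  "example = triple('ex:doc_001', yearCoords.prefix, '\"1400\"^^xsd:integer') + closeLine\n",
  "print(example)\n",
  "# -> ex:doc_001 year: \"1400\"^^xsd:integer ." ] }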
,
 { "cell_type": "code", "execution_count": 30, "metadata": { "tags": [] }, "outputs": [], "source": [
  "filePrefix = 'data_'\n",
  "fileType = 'otherlevel_ospedale_date'\n",
  "max_entries = 1000000000\n",
  "with open(import_dir + filePrefix + fileType + '.csv', newline=\"\") as csv_file, open(export_dir + filePrefix + fileType + '_range.ttl', 'w') as output:\n",
  "    reader = csv.DictReader(csv_file)\n",
  "    writeTTLHeader(output)\n",
  "    first = True\n",
  "    ii = 0\n",
  "    for row in reader:\n",
  "        # The index ii is used to process a limited number of entries for testing purposes\n",
  "        ii = ii + 1\n",
  "        # Skip the first line as it carries info we don't want to triplify\n",
  "        if(first):\n",
  "            first = False\n",
  "            continue\n",
  "        # E65 Creation - E52 Time Span\n",
  "        # Normalised dates are 8-character YYYYMMDD strings; '****'/'9998' (year),\n",
  "        # '**'/'99' (month) and '**'/'98' (day) mark unknown components\n",
  "        if(row['data_periodo_normalizzata_inizio'] != '' and row['data_periodo_normalizzata_inizio'] != 'NOTNORMAL'):\n",
  "            e52placeHolder = \"\"\n",
  "            if(row['data_periodo_normalizzata_inizio'] != 'Senza data'):\n",
  "                year = row['data_periodo_normalizzata_inizio'][0:4]\n",
  "                month = row['data_periodo_normalizzata_inizio'][4:6]\n",
  "                day = row['data_periodo_normalizzata_inizio'][6:8]\n",
  "                if(year != '****' and year != '9998'):\n",
  "                    line = triple(e52placeHolder, yearCoords.prefix, '\\\"'+year+'\\\"^^xsd:integer') + closeLine\n",
  "                    output.write(line)\n",
  "                if(month != '**' and month != '99'):\n",
  "                    line = triple(e52placeHolder, monthCoords.prefix, '\\\"'+month+'\\\"^^xsd:integer') + closeLine\n",
  "                    output.write(line)\n",
  "                if(day != '**' and day != '98'):\n",
  "                    line = triple(e52placeHolder, dayCoords.prefix, '\\\"'+day+'\\\"^^xsd:integer') + closeLine\n",
  "                    output.write(line)\n",
  "            else:\n",
  "                line = triple(e52placeHolder, cidocCoords.prefix + 'P3_has_note', '\\\"Data incompleta\\\"') + closeLine\n",
  "                output.write(line)\n",
  "            line = triple(e52placeHolder, beginningCoords.prefix, '\\\"'+row['data_periodo_normalizzata_inizio']+'\\\"^^xsd:date') + closeLine\n",
  "            output.write(line)\n",
  "            line = triple(e52placeHolder, endCoords.prefix, '\\\"'+row['data_periodo_normalizzata_inizio']+'\\\"^^xsd:date') + closeLine\n",
  "            output.write(line)\n",
  "\n",
  "        if(row['data_periodo_normalizzata_fine'] != '' and row['data_periodo_normalizzata_fine'] != 'NOTNORMAL'):\n",
  "            e52FplaceHolder = \"\"\n",
  "            if(row['data_periodo_normalizzata_fine'] != 'Senza data'):\n",
  "                year = row['data_periodo_normalizzata_fine'][0:4]\n",
  "                month = row['data_periodo_normalizzata_fine'][4:6]\n",
  "                day = row['data_periodo_normalizzata_fine'][6:8]\n",
  "                if(year != '****' and year != '9998'):\n",
  "                    line = triple(e52FplaceHolder, yearCoords.prefix, '\\\"'+year+'\\\"^^xsd:integer') + closeLine\n",
  "                    output.write(line)\n",
  "                if(month != '**' and month != '99'):\n",
  "                    line = triple(e52FplaceHolder, monthCoords.prefix, '\\\"'+month+'\\\"^^xsd:integer') + closeLine\n",
  "                    output.write(line)\n",
  "                if(day != '**' and day != '98'):\n",
  "                    line = triple(e52FplaceHolder, dayCoords.prefix, '\\\"'+day+'\\\"^^xsd:integer') + closeLine\n",
  "                    output.write(line)\n",
  "            else:\n",
  "                line = triple(e52FplaceHolder, cidocCoords.prefix + 'P3_has_note', '\\\"Data incompleta\\\"') + closeLine\n",
  "                output.write(line)\n",
  "\n",
  "            # line = triple(e65FplaceHolder, hasTimeSpanCoords.prefix, e52FplaceHolder) + closeLine\n",
  "            # output.write(line)\n",
  "            line = triple(e52FplaceHolder, hasTypeCoords.prefix, timeSpanCoords.prefix) + closeLine\n",
  "            output.write(line)\n",
  "            line = triple(e52FplaceHolder, labelCoords.prefix, '\\\"'+year+'\\\"') + closeLine\n",
  "            output.write(line)\n",
  "            line = triple(e52FplaceHolder, endCoords.prefix, '\\\"'+row['data_periodo_normalizzata_fine']+'\\\"^^xsd:date') + closeLine\n",
  "            output.write(line)\n",
  "            line = triple(e52FplaceHolder, beginningCoords.prefix, '\\\"'+row['data_periodo_normalizzata_fine']+'\\\"^^xsd:date') + closeLine\n",
  "            output.write(line)\n",
  "\n",
  "        # else:\n",
  "        #     if(row['data_periodo_normalizzata_inizio'] != ''):\n",
  "        #         if(row['data_periodo_normalizzata_inizio'] != 'Senza data'):\n",
  "        #             e52PplaceHolder = \"\"\n",
  "        #             line = triple(e52PplaceHolder, beginningCoords.prefix, '\\\"'+row['data_periodo_normalizzata_inizio']+'\\\"^^xsd:date') + closeLine\n",
  "        #             output.write(line)\n",
  "        #     if(row['data_periodo_normalizzata_fine'] != ''):\n",
  "        #         if(row['data_periodo_normalizzata_fine'] != 'Senza data'):\n",
  "        #             e52AplaceHolder = \"\"\n",
  "        #             line = triple(e52AplaceHolder, endCoords.prefix, '\\\"'+row['data_periodo_normalizzata_fine']+'\\\"^^xsd:date') + closeLine\n",
  "        #             output.write(line)\n",
  "\n",
  "        output.write('\\n')\n",
  "        # Limit the number of entries processed (if desired)\n",
  "        if(ii > max_entries):\n",
  "            break" ] }
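,
 { "cell_type": "markdown", "metadata": {}, "source": [
  "The loop above slices each normalised date positionally as `YYYYMMDD` and writes the raw 8-character value as the time span's beginning and end. The cell below is a small self-contained sketch of that slicing, using a made-up sample value; note that a strict `xsd:date` literal expects the dashed `YYYY-MM-DD` form, so the raw value may need reformatting downstream." ] },
 { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [
  "# Sketch only: 'sample' is an invented value, not taken from the CSV\n",
  "sample = '14000321'\n",
  "year, month, day = sample[0:4], sample[4:6], sample[6:8]\n",
  "print(year, month, day)                 # -> 1400 03 21\n",
  "print(year + '-' + month + '-' + day)   # -> 1400-03-21, the lexical form xsd:date expects" ] }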
 ],
 "metadata": { "interpreter": { "hash": "aee8b7b246df8f9039afb4144a1f6fd8d2ca17a180786b69acc140d282b71a49" }, "kernelspec": { "display_name": "Python 3.9.0 64-bit", "name": "python3" }, "language_info": { "codemirror_mode": { "name": "ipython", "version": 3 }, "file_extension": ".py", "mimetype": "text/x-python", "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", "version": "3.10.4" }, "metadata": { "interpreter": { "hash": "31f2aee4e71d21fbe5cf8b01ff0e069b9275f58929596ceb00d14d90e3e16cd6" } } },
 "nbformat": 4, "nbformat_minor": 2 }