{
"cells": [
{
"cell_type": "code",
"execution_count": 1,
"metadata": {},
"outputs": [],
"source": [
"# Utilities to read/write csv files\n",
"import csv\n",
"import unicodedata\n",
"# Ordered Dicts\n",
"from collections import OrderedDict\n",
"import json\n",
"\n",
"\n",
"# OPZIONAL IMPORTS\n",
"\n",
"# For timestamping/simple speed tests\n",
"from datetime import datetime\n",
"# Random number generator\n",
"from random import *\n",
"# System & command line utilities\n",
"import sys\n",
"# Json for the dictionary\n",
"import json\n",
"import re"
]
},
{
"cell_type": "code",
"execution_count": 2,
"metadata": {},
"outputs": [],
"source": [
"import_dir = '/Users/federicaspinelli/TEAMOVI/Parser/DATA/ASPO/CSV/datini/'\n",
"export_dir = '/Users/federicaspinelli/TEAMOVI/Parser/DATA/ASPO/RDF/datini/'"
]
},
{
"cell_type": "code",
"execution_count": 3,
"metadata": {},
"outputs": [],
"source": [
"# Custom class to store URIs + related infos for the ontologies/repositories\n",
"\n",
"class RDFcoords:\n",
" def __init__(self, uri, prefix, code = None):\n",
" self.uri = uri\n",
" self.prefix = prefix\n",
" self.code = code\n",
"\n",
"\n",
"# Repositories\n",
"datiniCoords = RDFcoords('', 'dt:')\n",
"personAuthCoords = RDFcoords('', 'pa:')\n",
"# W3 Predicates\n",
"hasTypeCoords = RDFcoords('', 'tp:')\n",
"labelCoords = RDFcoords('', 'lb:')\n",
"# CIDOC Predicates\n",
"identifiedByCoords = RDFcoords('', 'ib:')\n",
"hasTypePCoords = RDFcoords('', 'te:')\n",
"hasTimeSpanCoords = RDFcoords('', 'hs:')\n",
"tookPlaceCoords = RDFcoords('', 'tk:')\n",
"carriedByCoords = RDFcoords('', 'cb:')\n",
"movedByCoords = RDFcoords('', 'mb:')\n",
"movedToCoords = RDFcoords('', 'mt:')\n",
"movedFromCoords = RDFcoords('', 'mf:')\n",
"wasBroughtCoords = RDFcoords('', 'wb:')\n",
"hasProducedCoords = RDFcoords('', 'hp:')\n",
"wasProducedCoords = RDFcoords('', 'wp:')\n",
"carriesCoords = RDFcoords('', 'ca:')\n",
"hasAlternativeFormCoords = RDFcoords('', 'af:')\n",
"onGoingTCoords = RDFcoords('', 'gt:')\n",
"nsCoords = RDFcoords('', 'rdf:')\n",
"schemaCoords = RDFcoords('', 'rdfs:')\n",
"# CIDOC Objects\n",
"moveCoords = RDFcoords('', 'mv:', 'E9')\n",
"productionCoords = RDFcoords('', 'pr:', 'E12')\n",
"personCoords = RDFcoords('', 'ps:', 'E21')\n",
"manMadeObjectCoords = RDFcoords('', 'mo:', 'E22')\n",
"titleCoords = RDFcoords('', 'ti:' ,'E35')\n",
"identifierCoords = RDFcoords('', 'id:', 'E42')\n",
"timeSpanCoords = RDFcoords('', 'ts:', 'E52')\n",
"placeCoords = RDFcoords('', 'pl:', 'E53')\n",
"typeCoords = RDFcoords('', 'ty:', 'E55')\n",
"creationCoords = RDFcoords('', 'cr:', 'E65')\n",
"informationObjectCoords = RDFcoords('', 'io:', 'E73')\n",
"# New classes (subclasses of E7 Activity) - Exchange, Sending, Recive Letters\n",
"exchangeLettersCoords = RDFcoords('', 'el:', 'EL1')\n",
"sendLetterCoords = RDFcoords('', 'sl:', 'EL2')\n",
"receiveLetterCoords = RDFcoords('', 'rl:', 'EL3')"
]
},
{
"cell_type": "code",
"execution_count": 4,
"metadata": {},
"outputs": [],
"source": [
"# Basic functions for triples / shortened triples in TTL format\n",
"\n",
"def triple(subject, predicate, object1):\n",
" line = subject + ' ' + predicate + ' ' + object1\n",
" return line\n",
"\n",
"def doublet(predicate, object1):\n",
" line = ' ' + predicate + ' ' + object1\n",
" return line\n",
"\n",
"def singlet(object1):\n",
" line = ' ' + object1\n",
" return line\n",
"\n",
"# Line endings in TTL format\n",
"continueLine1 = ' ;\\n'\n",
"continueLine2 = ' ,\\n'\n",
"closeLine = ' .\\n'"
]
},
{
"cell_type": "code",
"execution_count": 5,
"metadata": {},
"outputs": [],
"source": [
"def writeTTLHeader(output):\n",
" output.write('@prefix ' + datiniCoords.prefix + ' ' + datiniCoords.uri + closeLine)\n",
" output.write('@prefix ' + personAuthCoords.prefix + ' ' + personAuthCoords.uri + closeLine)\n",
" output.write('@prefix ' + hasTypeCoords.prefix + ' ' + hasTypeCoords.uri + closeLine)\n",
" output.write('@prefix ' + labelCoords.prefix + ' ' + labelCoords.uri + closeLine)\n",
" output.write('@prefix ' + identifiedByCoords.prefix + ' ' + identifiedByCoords.uri + closeLine)\n",
" output.write('@prefix ' + hasTypePCoords.prefix + ' ' + hasTypePCoords.uri + closeLine)\n",
" output.write('@prefix ' + hasTimeSpanCoords.prefix + ' ' + hasTimeSpanCoords.uri + closeLine)\n",
" output.write('@prefix ' + tookPlaceCoords.prefix + ' ' + tookPlaceCoords.uri + closeLine) \n",
" output.write('@prefix ' + carriedByCoords.prefix + ' ' + carriedByCoords.uri + closeLine)\n",
" output.write('@prefix ' + movedByCoords.prefix + ' ' + movedByCoords.uri + closeLine)\n",
" output.write('@prefix ' + movedToCoords.prefix + ' ' + movedToCoords.uri + closeLine)\n",
" output.write('@prefix ' + movedFromCoords.prefix + ' ' + movedFromCoords.uri + closeLine) \n",
" output.write('@prefix ' + wasBroughtCoords.prefix + ' ' + wasBroughtCoords.uri + closeLine)\n",
" output.write('@prefix ' + hasProducedCoords.prefix + ' ' + hasProducedCoords.uri + closeLine)\n",
" output.write('@prefix ' + wasProducedCoords.prefix + ' ' + wasProducedCoords.uri + closeLine)\n",
" output.write('@prefix ' + carriesCoords.prefix + ' ' + carriesCoords.uri + closeLine)\n",
" output.write('@prefix ' + hasAlternativeFormCoords.prefix + ' ' + hasAlternativeFormCoords.uri + closeLine)\n",
" output.write('@prefix ' + moveCoords.prefix + ' ' + moveCoords.uri + closeLine) \n",
" output.write('@prefix ' + productionCoords.prefix + ' ' + productionCoords.uri + closeLine)\n",
" output.write('@prefix ' + personCoords.prefix + ' ' + personCoords.uri + closeLine)\n",
" output.write('@prefix ' + manMadeObjectCoords.prefix + ' ' + manMadeObjectCoords.uri + closeLine)\n",
" output.write('@prefix ' + titleCoords.prefix + ' ' + titleCoords.uri + closeLine)\n",
" output.write('@prefix ' + identifierCoords.prefix + ' ' + identifierCoords.uri + closeLine)\n",
" output.write('@prefix ' + timeSpanCoords.prefix + ' ' + timeSpanCoords.uri + closeLine)\n",
" output.write('@prefix ' + placeCoords.prefix + ' ' + placeCoords.uri + closeLine)\n",
" output.write('@prefix ' + typeCoords.prefix + ' ' + typeCoords.uri + closeLine)\n",
" output.write('@prefix ' + creationCoords.prefix + ' ' + creationCoords.uri + closeLine)\n",
" output.write('@prefix ' + informationObjectCoords.prefix + ' ' + informationObjectCoords.uri + closeLine)\n",
" output.write('@prefix ' + exchangeLettersCoords.prefix + ' ' + exchangeLettersCoords.uri + closeLine)\n",
" output.write('@prefix ' + sendLetterCoords.prefix + ' ' + sendLetterCoords.uri + closeLine)\n",
" output.write('@prefix ' + receiveLetterCoords.prefix + ' ' + receiveLetterCoords.uri + closeLine)\n",
" output.write('@prefix ' + onGoingTCoords.prefix + ' ' + onGoingTCoords.uri + closeLine)\n",
" output.write('@prefix ' + schemaCoords.prefix + ' ' + schemaCoords.uri + closeLine)\n",
" output.write('@prefix ' + nsCoords.prefix + ' ' + nsCoords.uri + closeLine)\n",
"\n",
" output.write('\\n')\n"
]
},
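{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Illustrative check (not part of the original pipeline): writeTTLHeader can be\n",
"# exercised against an in-memory buffer to preview the generated @prefix block\n",
"# without writing to the export directory. Note that the URIs above are left\n",
"# empty in this notebook, so the declarations stay incomplete until filled in.\n",
"import io\n",
"\n",
"preview = io.StringIO()\n",
"writeTTLHeader(preview)\n",
"print(preview.getvalue())"
]
},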
{
"cell_type": "code",
"execution_count": 6,
"metadata": {
"tags": []
},
"outputs": [],
"source": [
"filePrefix = 'data_'\n",
"fileType = 'item'\n",
"max_entries = 1000000000\n",
"\n",
"with open(import_dir + filePrefix + fileType + '.csv', newline=\"\") as csv_file, open(export_dir + filePrefix + fileType + '_date.ttl', 'w') as output:\n",
" reader = csv.DictReader(csv_file)\n",
" writeTTLHeader(output)\n",
" first = True\n",
" ii = 0\n",
" for row in reader:\n",
" # The index ii is used to process a limited number of entries for testing purposes\n",
" ii = ii+1\n",
" # Skip the first line as it carries info we don't want to triplify\n",
" if(first):\n",
" first = False\n",
" continue\n",
" \n",
" #Evento send letter\n",
" el2placeHolder = \"\"\n",
" el3placeHolder = \"\"\n",
"\n",
" # Data invio\n",
" if(row['data_inizio'] != ''):\n",
" e52PplaceHolder = \"\"\n",
" line = triple(el2placeHolder, e52PplaceHolder, '\\\"'+ row['data_inizio'] + '\\\"') + closeLine\n",
" output.write(line)\n",
"\n",
" if(row['data_fine'] != ''):\n",
" e52AplaceHolder = \"\"\n",
" line = triple(el3placeHolder, e52AplaceHolder, '\\\"' + row['data_fine'] + '\\\"') + closeLine\n",
" output.write(line)\n",
"\n",
" output.write('\\n')\n",
" #\n",
" #\n",
" # Limit number of entries processed (if desired)\n",
" if(ii>max_entries):\n",
" break\n",
" "
]
}
],
"metadata": {
"interpreter": {
"hash": "aee8b7b246df8f9039afb4144a1f6fd8d2ca17a180786b69acc140d282b71a49"
},
"kernelspec": {
"display_name": "Python 3.9.0 64-bit",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.9.0"
},
"metadata": {
"interpreter": {
"hash": "31f2aee4e71d21fbe5cf8b01ff0e069b9275f58929596ceb00d14d90e3e16cd6"
}
}
},
"nbformat": 4,
"nbformat_minor": 2
}