{ "cells": [ { "cell_type": "code", "execution_count": 13, "metadata": {}, "outputs": [], "source": [ "# Utilities to read/write csv files\n", "import csv\n", "import unicodedata\n", "# Ordered Dicts\n", "from collections import OrderedDict\n", "import json\n", "\n", "\n", "# OPZIONAL IMPORTS\n", "\n", "# For timestamping/simple speed tests\n", "from datetime import datetime\n", "# Random number generator\n", "from random import *\n", "# System & command line utilities\n", "import sys\n", "# Json for the dictionary\n", "import json" ] }, { "cell_type": "code", "execution_count": 14, "metadata": {}, "outputs": [], "source": [ <<<<<<< HEAD "import_dir = '.../TEAMOVI/Parser/DATA/OVI/CSV/'\n", "export_dir = '.../TEAMOVI/Parser/DATA/OVI/RDF/'" ======= "import_dir = '/Users/federicaspinelli/Google Drive/OVI-CNR/CSV/OVI/'\n", "export_dir = '/Users/federicaspinelli/Google Drive/OVI-CNR/RDF/OVI/'" >>>>>>> 8d169b35cf5076a25cc06d78d6398a20ca232c1c ] }, { "cell_type": "code", "execution_count": 15, "metadata": {}, "outputs": [], "source": [ "# Custom class to store URIs + related infos for the ontologies/repositories\n", "\n", "class RDFcoords:\n", " def __init__(self, uri, prefix, code = None):\n", " self.uri = uri\n", " self.prefix = prefix\n", " self.code = code\n", "\n", "\n", "# Repositories\n", "datiniCoords = RDFcoords('', 'dt:')\n", "personAuthCoords = RDFcoords('', 'pa:')\n", "# W3/CIDOC Predicates\n", "hasTypeCoords = RDFcoords('', 'tp:')\n", "hasTypePCoords = RDFcoords('', 'te:')\n", "carriesCoords = RDFcoords('', 'ca:')\n", "identifiedByCoords = RDFcoords('', 'ib:')\n", "labelCoords = RDFcoords('', 'lb:')\n", "wasBroughtCoords = RDFcoords('', 'wb:')\n", "carriedByCoords = RDFcoords('', 'cb:')\n", "hasAlternativeFormCoords = RDFcoords('', 'af:')\n", "hasNoteCoords = RDFcoords('', 'no:')\n", "hasTypeNCoords = RDFcoords('', 'tn:')\n", "hasLanguageCoords = RDFcoords('', 'hl:')\n", "documentsCoords = RDFcoords('', 'ds:')\n", "hasComponentCoords = RDFcoords('', 'hc:')\n", "\n", "# CIDOC Objects\n", "manMadeObjectCoords = RDFcoords('', 'mo:', 'E22')\n", "informationObjectCoords = RDFcoords('', 'io:', 'E73')\n", "titleCoords = RDFcoords('', 'ti:' ,'E35')\n", "placeAppellationCoords = RDFcoords('', 'pa:', 'E44')\n", "identifierCoords = RDFcoords('', 'id:', 'E42')\n", "typeCoords = RDFcoords('', 'ty:', 'E55')\n", "creationCoords = RDFcoords('', 'cr:', 'E65')\n", "personCoords = RDFcoords('', 'ps:', 'E21')\n", "stringCoords = RDFcoords('', 'sr:', 'E62')\n", "linguisticObjCoords = RDFcoords('', 'lj:', 'E33')\n", "languageCoords = RDFcoords('', 'ln:', 'E56')\n", "appellationCoords = RDFcoords('', 'ap:', 'E41')\n", "propositionalObjCoords = RDFcoords('', 'pj:', 'E89')\n" ] }, { "cell_type": "code", "execution_count": 16, "metadata": {}, "outputs": [], "source": [ "# Basic functions for triples / shortened triples in TTL format\n", "\n", "def triple(subject, predicate, object1):\n", " line = subject + ' ' + predicate + ' ' + object1\n", " return line\n", "\n", "def doublet(predicate, object1):\n", " line = ' ' + predicate + ' ' + object1\n", " return line\n", "\n", "def singlet(object1):\n", " line = ' ' + object1\n", " return line\n", "\n", "# Line endings in TTL format\n", "continueLine1 = ' ;\\n'\n", "continueLine2 = ' ,\\n'\n", "closeLine = ' .\\n'" ] }, { "cell_type": "code", "execution_count": 17, "metadata": {}, "outputs": [], "source": [ "def writeTTLHeader(output):\n", " output.write('@prefix ' + datiniCoords.prefix + ' ' + datiniCoords.uri + closeLine)\n", " output.write('@prefix ' + 
{ "cell_type": "code", "execution_count": 17, "metadata": {}, "outputs": [], "source": [ "def writeTTLHeader(output):\n", "    # Namespaces referenced by the triples written by this notebook\n", "    headerCoords = [datiniCoords, personAuthCoords, hasTypeCoords, hasTypePCoords,\n", "                    manMadeObjectCoords, carriesCoords, informationObjectCoords,\n", "                    identifiedByCoords, titleCoords, labelCoords, identifierCoords,\n", "                    wasBroughtCoords, typeCoords, carriedByCoords, personCoords,\n", "                    hasAlternativeFormCoords, hasNoteCoords, hasTypeNCoords,\n", "                    stringCoords, linguisticObjCoords, languageCoords, hasLanguageCoords,\n", "                    documentsCoords, appellationCoords, propositionalObjCoords,\n", "                    hasComponentCoords]\n", "    # One @prefix declaration per namespace\n", "    for coords in headerCoords:\n", "        output.write('@prefix ' + coords.prefix + ' ' + coords.uri + closeLine)\n", "    output.write('\\n')\n" ] },
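{ "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "# Illustrative sketch only: the conversion cell below escapes CSV values inline with\n", "# chained .replace() calls before quoting them as TTL string literals. This hypothetical\n", "# helper is not called by the loop; it just shows the same escaping on a sample value.\n", "def escapeTTLLiteral(value):\n", "    # Escape backslashes first, then double quotes, then wrap in quotes\n", "    return '\"' + value.replace('\\\\', '\\\\\\\\').replace('\"', '\\\\\"') + '\"'\n", "\n", "print(escapeTTLLiteral('Edizione \"critica\" \\\\ note'))\n", "# -> \"Edizione \\\"critica\\\" \\\\ note\"" ] },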
{ "cell_type": "code", "execution_count": 18, "metadata": { "tags": [] }, "outputs": [], "source": [ "filePrefix = 'Biblio'\n", "fileType = 'Datini'\n", "max_entries = 10000\n", "\n", "with open(import_dir + filePrefix + fileType + '_IDAspo.csv', newline=\"\") as csv_file, open(export_dir + filePrefix + fileType + '_edition.ttl', 'w') as output:\n", "    reader = csv.DictReader(csv_file)\n", "    writeTTLHeader(output)\n", "    first = True\n", "    ii = 0\n", "    for row in reader:\n", "        # The index ii is used to process a limited number of entries for testing purposes\n", "        ii = ii+1\n", "        # Skip the first line as it carries info we don't want to triplify\n", "        if(first):\n", "            first = False\n", "            continue\n", "\n", "        if(row['num_ovi'] != '2'):\n", "            # Base URI of the E73 information object: the original value is missing from\n", "            # this copy (unresolved merge conflict) and must be restored; '<...' is a placeholder\n", "            e73placeHolder = \"<...\"\n", "            e73placeHolderE = e73placeHolder + \"_ED>\"\n", "            line = triple(e73placeHolderE, documentsCoords.prefix, e73placeHolder + \">\") + closeLine\n", "            output.write(line)\n", "            line = triple(e73placeHolderE, labelCoords.prefix, '\\\"Edizione\\\"') + closeLine\n", "            output.write(line)\n", "            e41placeHolder = e73placeHolder + \"_ED\" + \"_\" + appellationCoords.code + \">\"\n", "            line = triple(e73placeHolderE, identifiedByCoords.prefix, e41placeHolder) + closeLine\n", "            output.write(line)\n", "            line = triple(e41placeHolder, hasTypeCoords.prefix, appellationCoords.prefix) + closeLine\n", "            output.write(line)\n", "            line = triple(e41placeHolder, labelCoords.prefix, '\\\"' + row['edizione'].replace('\\\\','\\\\\\\\').replace('\"','\\\\\"') + '\\\"') + closeLine\n", "            output.write(line)\n", "\n", "            # Edizione abbreviata (abbreviated edition)\n", "            e41placeHolder2 = e73placeHolder + \"_ED_AB>\"\n", "            line = triple(e41placeHolder, hasAlternativeFormCoords.prefix, e41placeHolder2) + closeLine\n", "            output.write(line)\n", "            line = triple(e41placeHolder2, labelCoords.prefix, '\\\"' + row['edizione_abbr'].replace('\\\\','\\\\\\\\').replace('\"','\\\\\"') + '\\\"') + closeLine\n", "            output.write(line)\n", "            e41e55placeHolder2 = e73placeHolder + \"_ED_AB_E55>\"\n", "            line = triple(e41placeHolder2, hasTypePCoords.prefix, e41e55placeHolder2) + closeLine\n", "            output.write(line)\n", "            line = triple(e41e55placeHolder2, labelCoords.prefix, '\\\"Edizione abbreviata\\\"') + closeLine\n", "            output.write(line)\n", "\n", "            # Raccolta (collection)\n", "            e89placeHolder = e73placeHolder + \"_\" + propositionalObjCoords.code + \">\"\n", "            line = triple(e89placeHolder, hasComponentCoords.prefix, e73placeHolder + \">\") + closeLine\n", "            output.write(line)\n", "            line = triple(e89placeHolder, hasTypeCoords.prefix, propositionalObjCoords.prefix) + closeLine\n", "            output.write(line)\n", "            line = triple(e89placeHolder, labelCoords.prefix, '\\\"' + row['raccolta'].replace('\\\\','\\\\\\\\').replace('\"','\\\\\"') + '\\\"') + closeLine\n", "            output.write(line)\n", "            e89e55placeHolder = e73placeHolder + \"_\" + propositionalObjCoords.code + \"_E55>\"\n", "            line = triple(e89placeHolder, hasTypePCoords.prefix, e89e55placeHolder) + closeLine\n", "            output.write(line)\n", "            line = triple(e89e55placeHolder, labelCoords.prefix, '\\\"Raccolta\\\"') + closeLine\n", "            output.write(line)\n", "\n", "            output.write('\\n')\n", "\n", "        # Limit number of entries processed (if desired)\n", "        if(ii>max_entries):\n", "            break\n" ] } ], "metadata": { "interpreter": { "hash": "aee8b7b246df8f9039afb4144a1f6fd8d2ca17a180786b69acc140d282b71a49" }, "kernelspec": { "display_name": "Python 3.9.0 64-bit", "name": "python3" }, "language_info": { "codemirror_mode": { "name": "ipython", "version": 3 }, "file_extension": ".py", "mimetype": "text/x-python", "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", "version": "3.9.0" }, "metadata": { "interpreter": { "hash": "31f2aee4e71d21fbe5cf8b01ff0e069b9275f58929596ceb00d14d90e3e16cd6" } } }, "nbformat": 4, "nbformat_minor": 2 }