
Remove old parser

Federica committed 3 years ago · commit 9bb9d72e85

+ 0 - 205
CSV_to_RDF/datini/.ipynb_checkpoints/CSV_to_RDF_datini_all-checkpoint.ipynb

@@ -1,205 +0,0 @@
-{
- "cells": [
-  {
-   "cell_type": "code",
-   "execution_count": 19,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "# Utilities to read/write csv files\n",
-    "import csv\n",
-    "# Utilities to handle character encodings\n",
-    "import unicodedata\n",
-    "# Ordered Dicts\n",
-    "from collections import OrderedDict\n",
-    "\n",
-    "import json\n",
-    "\n",
-    "\n",
-    "# OPZIONAL IMPORTS\n",
-    "\n",
-    "# For timestamping/simple speed tests\n",
-    "from datetime import datetime\n",
-    "# Random number generator\n",
-    "from random import *\n",
-    "# System & command line utilities\n",
-    "import sys\n",
-    "# Json for the dictionary\n",
-    "import json"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 20,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "import_dir = '/Users/federicaspinelli/Google Drive/OVI:CNR/CSV/ASPO/datini/'\n",
-    "export_dir = '/Users/federicaspinelli/Google Drive/OVI:CNR/RDF/ASPO/datini/'"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 21,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "# Custom class to store URIs + related infos for the ontologies/repositories\n",
-    "\n",
-    "class RDFcoords:\n",
-    "    def __init__(self, uri, prefix, code = None):\n",
-    "        self.uri = uri\n",
-    "        self.prefix = prefix\n",
-    "        self.code = code\n",
-    "\n",
-    "\n",
-    "# Repositories\n",
-    "datiniCoords = RDFcoords('<http://datini.archiviodistato.prato.it/la-ricerca/scheda/>', 'dt:')\n",
-    "# W3/CIDOC Predicates\n",
-    "hasTypeCoords = RDFcoords('<http://www.w3.org/1999/02/22-rdf-syntax-ns#type>', 'tp:')\n",
-    "hasTypePCoords = RDFcoords('<http://www.cidoc-crm.org/cidoc-crm/P2_has_type>', 'te:')\n",
-    "carriesCoords = RDFcoords('<http://www.cidoc-crm.org/cidoc-crm/P128_carries>', 'ca:')\n",
-    "identifiedByCoords = RDFcoords('<http://www.cidoc-crm.org/cidoc-crm/P1_is_identified_by>', 'ib:')\n",
-    "labelCoords = RDFcoords('<http://www.w3.org/2000/01/rdf-schema#label>', 'lb:')\n",
-    "\n",
-    "# CIDOC Objects\n",
-    "manMadeObjectCoords = RDFcoords('<http://www.cidoc-crm.org/cidoc-crm/E22_Man-Made_Object>', 'mo:', 'E22')\n",
-    "informationObjectCoords = RDFcoords('<http://www.cidoc-crm.org/cidoc-crm/E73_Information_Object>', 'io:', 'E73')\n",
-    "titleCoords = RDFcoords('<http://www.cidoc-crm.org/cidoc-crm/E35_Title>', 'ti:' ,'E35')\n",
-    "placeAppellationCoords = RDFcoords('<http://www.cidoc-crm.org/cidoc-crm/E44_Place_appellation>', 'pa:', 'E44')\n",
-    "identifierCoords = RDFcoords('<http://www.cidoc-crm.org/cidoc-crm/E42_Identifier>', 'id:', 'E42')\n",
-    "typeCoords = RDFcoords('<http://www.cidoc-crm.org/cidoc-crm/E55_Type>', 'ty:', 'E55')"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 22,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "# Basic functions for triples / shortened triples in TTL format\n",
-    "\n",
-    "def triple(subject, predicate, object1):\n",
-    "    line = subject + ' ' + predicate + ' ' + object1\n",
-    "    return line\n",
-    "\n",
-    "def doublet(predicate, object1):\n",
-    "    line = '    ' + predicate + ' ' + object1\n",
-    "    return line\n",
-    "\n",
-    "def singlet(object1):\n",
-    "    line = '        ' + object1\n",
-    "    return line\n",
-    "\n",
-    "# Line endings in TTL format\n",
-    "continueLine1 = ' ;\\n'\n",
-    "continueLine2 = ' ,\\n'\n",
-    "closeLine = ' .\\n'"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 23,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "def writeTTLHeader(output):\n",
-    "    output.write('@prefix ' + datiniCoords.prefix + ' ' + datiniCoords.uri + closeLine)\n",
-    "    output.write('@prefix ' + hasTypeCoords.prefix + ' ' + hasTypeCoords.uri + closeLine)\n",
-    "    output.write('@prefix ' + hasTypePCoords.prefix + ' ' + hasTypePCoords.uri + closeLine)\n",
-    "    output.write('@prefix ' + manMadeObjectCoords.prefix + ' ' + manMadeObjectCoords.uri + closeLine)\n",
-    "    output.write('@prefix ' + carriesCoords.prefix + ' ' + carriesCoords.uri + closeLine)\n",
-    "    output.write('@prefix ' + informationObjectCoords.prefix + ' ' + informationObjectCoords.uri + closeLine)\n",
-    "    output.write('@prefix ' + identifiedByCoords.prefix + ' ' + identifiedByCoords.uri + closeLine)\n",
-    "    output.write('@prefix ' + titleCoords.prefix + ' ' + titleCoords.uri + closeLine)\n",
-    "    output.write('@prefix ' + labelCoords.prefix + ' ' + labelCoords.uri + closeLine)\n",
-    "    output.write('@prefix ' + identifierCoords.prefix + ' ' + identifierCoords.uri + closeLine)\n",
-    "    output.write('@prefix ' + typeCoords.prefix + ' ' + typeCoords.uri + closeLine)\n",
-    "    output.write('\\n')\n"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 24,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "filePrefix = 'data_'\n",
-    "fileType = 'subfonds'\n",
-    "max_entries = 1000000000\n",
-    "\n",
-    "with open(import_dir + filePrefix + fileType + '.csv', newline=\"\") as csv_file, open(export_dir + filePrefix + fileType + '.ttl', 'w') as output:\n",
-    "    reader = csv.DictReader(csv_file)\n",
-    "    writeTTLHeader(output)\n",
-    "    first = True\n",
-    "    ii = 0\n",
-    "    for row in reader:\n",
-    "        # The index ii is used to process a limited number of entries for testing purposes\n",
-    "        ii = ii+1\n",
-    "        # Skip the first line as it carries info we don't want to triplify\n",
-    "        if(first):\n",
-    "            first = False\n",
-    "            continue\n",
-    "        # Write E22 Man Made Object & E73 Information Object -- should exist for every entry?\n",
-    "        line = triple(datiniCoords.prefix + row['id'], hasTypeCoords.prefix, manMadeObjectCoords.prefix) + closeLine\n",
-    "        output.write(line)\n",
-    "        line = triple(datiniCoords.prefix + row['id'], labelCoords.prefix, '\\\"Documento fisico: ' + row['titolo_aspo'].replace('\\\\','\\\\\\\\').replace('\"','\\\\\"')+ '\\\"') + closeLine\n",
-    "        output.write(line)\n",
-    "        e37placeHolder = \"<http://datini.archiviodistato.prato.it/la-ricerca/scheda/\" + row['id'] + \"/\" + informationObjectCoords.code + \">\"\n",
-    "        line = triple(datiniCoords.prefix + row['id'], carriesCoords.prefix, e37placeHolder) + closeLine\n",
-    "        output.write(line)\n",
-    "        line = triple(e37placeHolder, hasTypeCoords.prefix, informationObjectCoords.prefix) + closeLine\n",
-    "        output.write(line)\n",
-    "        line = triple(e37placeHolder, labelCoords.prefix, '\\\"Contenuto informativo: ' + row['titolo_aspo'].replace('\\\\','\\\\\\\\').replace('\"','\\\\\"')+ '\\\"') + closeLine\n",
-    "        output.write(line)\n",
-    "        #\n",
-    "        # If the 'titolo_aspo' property is not empty for the given entry, write down title-related triples\n",
-    "        if(row['titolo_aspo'] != 'None'):\n",
-    "            e35placeHolder1 = \"<http://datini.archiviodistato.prato.it/la-ricerca/scheda/\" + row['id'] + \"/\" + titleCoords.code + \">\"\n",
-    "            line = triple(e37placeHolder, identifiedByCoords.prefix, e35placeHolder1) + closeLine\n",
-    "            output.write(line)\n",
-    "            line = triple(e35placeHolder1, hasTypeCoords.prefix, titleCoords.prefix) + closeLine\n",
-    "            output.write(line)\n",
-    "            line = triple(e35placeHolder1, labelCoords.prefix, '\\\"' + row['titolo_aspo'].replace('\\\\','\\\\\\\\').replace('\"','\\\\\"')+ '\\\"') + closeLine\n",
-    "            output.write(line)\n",
-    "                        \n",
-    "        output.write('\\n')\n",
-    "        #\n",
-    "        #\n",
-    "        # Limit number of entries processed (if desired)\n",
-    "        if(ii>max_entries):\n",
-    "            break\n",
-    "        "
-   ]
-  }
- ],
- "metadata": {
-  "interpreter": {
-   "hash": "31f2aee4e71d21fbe5cf8b01ff0e069b9275f58929596ceb00d14d90e3e16cd6"
-  },
-  "kernelspec": {
-   "display_name": "Python 3",
-   "language": "python",
-   "name": "python3"
-  },
-  "language_info": {
-   "codemirror_mode": {
-    "name": "ipython",
-    "version": 3
-   },
-   "file_extension": ".py",
-   "mimetype": "text/x-python",
-   "name": "python",
-   "nbconvert_exporter": "python",
-   "pygments_lexer": "ipython3",
-   "version": "3.8.10"
-  },
-  "metadata": {
-   "interpreter": {
-    "hash": "31f2aee4e71d21fbe5cf8b01ff0e069b9275f58929596ceb00d14d90e3e16cd6"
-   }
-  }
- },
- "nbformat": 4,
- "nbformat_minor": 2
-}
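
For reference, the deleted checkpoint mirrored the main notebook's approach of emitting Turtle by string concatenation from the CSV export. Below is a minimal, self-contained sketch of that CSV-to-TTL pattern, not the project's actual replacement parser: the subject namespace and the 'id'/'titolo_aspo' columns come from the removed code, while the function and file names are illustrative only.

import csv

PREFIXES = (
    '@prefix dt: <http://datini.archiviodistato.prato.it/la-ricerca/scheda/> .\n'
    '@prefix crm: <http://www.cidoc-crm.org/cidoc-crm/> .\n'
    '@prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> .\n\n'
)

def escape_literal(text):
    # Escape backslashes first, then double quotes, as the deleted code did.
    return text.replace('\\', '\\\\').replace('"', '\\"')

def csv_to_ttl(csv_path, ttl_path):
    with open(csv_path, newline='') as src, open(ttl_path, 'w') as out:
        out.write(PREFIXES)
        for row in csv.DictReader(src):
            subject = 'dt:' + row['id']
            # One E22 Man-Made Object per CSV row.
            out.write(subject + ' a crm:E22_Man-Made_Object .\n')
            # The export stores missing titles as the literal string 'None'.
            if row['titolo_aspo'] != 'None':
                out.write(subject + ' rdfs:label "Documento fisico: '
                          + escape_literal(row['titolo_aspo']) + '" .\n')

# Hypothetical usage:
# csv_to_ttl('data_subfonds.csv', 'data_subfonds.ttl')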