
Delete duplicate Parser

Federica 2 years ago
commit d7d70ff50c
1 changed file with 0 additions and 191 deletions
  1. CSV_to_RDF/datini/CSV_to_RDF_datini_file_no_node.ipynb (+0 -191)

+ 0 - 191
CSV_to_RDF/datini/CSV_to_RDF_datini_file_no_node.ipynb

@@ -1,191 +0,0 @@
-{
- "cells": [
-  {
-   "cell_type": "code",
-   "execution_count": 31,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "# Utilities to read/write csv files\n",
-    "import csv\n",
-    "# Utilities to handle character encodings\n",
-    "import unicodedata\n",
-    "# Ordered Dicts\n",
-    "from collections import OrderedDict\n",
-    "\n",
-    "import json\n",
-    "\n",
-    "\n",
-    "# OPZIONAL IMPORTS\n",
-    "\n",
-    "# For timestamping/simple speed tests\n",
-    "from datetime import datetime\n",
-    "# Random number generator\n",
-    "from random import *\n",
-    "# System & command line utilities\n",
-    "import sys\n",
-    "# Json for the dictionary\n",
-    "import json"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 32,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "import_dir = '/Users/federicaspinelli/Google Drive/OVI:CNR/CSV/ASPO/datini/'\n",
-    "export_dir = '/Users/federicaspinelli/Google Drive/OVI:CNR/RDF/ASPO/datini/'"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 33,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "# Custom class to store URIs + related infos for the ontologies/repositories\n",
-    "\n",
-    "class RDFcoords:\n",
-    "    def __init__(self, uri, prefix, code = None):\n",
-    "        self.uri = uri\n",
-    "        self.prefix = prefix\n",
-    "        self.code = code\n",
-    "\n",
-    "\n",
-    "# Repositories\n",
-    "datiniCoords = RDFcoords('<http://datini.archiviodistato.prato.it/la-ricerca/scheda/>', 'dt:')\n",
-    "# W3/CIDOC Predicates\n",
-    "hasTypeCoords = RDFcoords('<http://www.w3.org/1999/02/22-rdf-syntax-ns#type>', 'tp:')\n",
-    "carriesCoords = RDFcoords('<http://www.cidoc-crm.org/cidoc-crm/P128_carries>', 'ca:')\n",
-    "identifiedByCoords = RDFcoords('<http://www.cidoc-crm.org/cidoc-crm/P1_is_identified_by>', 'ib:')\n",
-    "labelCoords = RDFcoords('<http://www.w3.org/2000/01/rdf-schema#label>', 'lb:')\n",
-    "\n",
-    "# CIDOC Objects\n",
-    "manMadeObjectCoords = RDFcoords('<http://www.cidoc-crm.org/cidoc-crm/E22_Man-Made_Object>', 'mo:', 'E22')\n",
-    "informationObjectCoords = RDFcoords('<http://www.cidoc-crm.org/cidoc-crm/E73_Information_Object>', 'io:', 'E73')\n",
-    "titleCoords = RDFcoords('<http://www.cidoc-crm.org/cidoc-crm/E35_Title>', 'ti:' ,'E35')\n",
-    "placeAppellationCoords = RDFcoords('<http://www.cidoc-crm.org/cidoc-crm/E44_Place_appellation>', 'pa:', 'E44')\n",
-    "identifierCoords = RDFcoords('<http://www.cidoc-crm.org/cidoc-crm/E42_Identifier>', 'id:', 'E42')"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 34,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "# Basic functions for triples / shortened triples in TTL format\n",
-    "\n",
-    "def triple(subject, predicate, object1):\n",
-    "    line = subject + ' ' + predicate + ' ' + object1\n",
-    "    return line\n",
-    "\n",
-    "def doublet(predicate, object1):\n",
-    "    line = '    ' + predicate + ' ' + object1\n",
-    "    return line\n",
-    "\n",
-    "def singlet(object1):\n",
-    "    line = '        ' + object1\n",
-    "    return line\n",
-    "\n",
-    "# Line endings in TTL format\n",
-    "continueLine1 = ' ;\\n'\n",
-    "continueLine2 = ' ,\\n'\n",
-    "closeLine = ' .\\n'"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 35,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "def writeTTLHeader(output):\n",
-    "    output.write('@prefix ' + datiniCoords.prefix + ' ' + datiniCoords.uri + closeLine)\n",
-    "    output.write('@prefix ' + hasTypeCoords.prefix + ' ' + hasTypeCoords.uri + closeLine)\n",
-    "    output.write('@prefix ' + manMadeObjectCoords.prefix + ' ' + manMadeObjectCoords.uri + closeLine)\n",
-    "    output.write('@prefix ' + carriesCoords.prefix + ' ' + carriesCoords.uri + closeLine)\n",
-    "    output.write('@prefix ' + informationObjectCoords.prefix + ' ' + informationObjectCoords.uri + closeLine)\n",
-    "    output.write('@prefix ' + identifiedByCoords.prefix + ' ' + identifiedByCoords.uri + closeLine)\n",
-    "    output.write('@prefix ' + titleCoords.prefix + ' ' + titleCoords.uri + closeLine)\n",
-    "    output.write('@prefix ' + labelCoords.prefix + ' ' + labelCoords.uri + closeLine)\n",
-    "    output.write('@prefix ' + identifierCoords.prefix + ' ' + identifierCoords.uri + closeLine)\n",
-    "    output.write('\\n')\n"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 36,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "filePrefix = 'data_'\n",
-    "fileType = 'file'\n",
-    "max_entries = 1000000000\n",
-    "\n",
-    "with open(import_dir + filePrefix + fileType + '.csv', newline=\"\") as csv_file, open(export_dir + filePrefix + fileType + '.ttl', 'w') as output:\n",
-    "    reader = csv.DictReader(csv_file)\n",
-    "    writeTTLHeader(output)\n",
-    "    first = True\n",
-    "    ii = 0\n",
-    "    for row in reader:\n",
-    "        # The index ii is used to process a limited number of entries for testing purposes\n",
-    "        ii = ii+1\n",
-    "        # Skip the first line as it carries info we don't want to triplify\n",
-    "        if(first):\n",
-    "            first = False\n",
-    "            continue\n",
-    "        # Write E22 Man Made Object & E73 Information Object -- should exist for every entry?\n",
-    "        line = triple(datiniCoords.prefix + row['id'], hasTypeCoords.prefix, manMadeObjectCoords.prefix) + closeLine\n",
-    "        output.write(line)\n",
-    "        line = triple(datiniCoords.prefix + row['id'], carriesCoords.prefix, datiniCoords.prefix + row['id'] + informationObjectCoords.code) + closeLine\n",
-    "        output.write(line)\n",
-    "        line = triple(datiniCoords.prefix + row['id'] + informationObjectCoords.code, hasTypeCoords.prefix, informationObjectCoords.prefix) + closeLine\n",
-    "        output.write(line)\n",
-    "        #\n",
-    "        # If the 'titolo_aspo' property is not empty for the given entry, write down title-related triples\n",
-    "        if(row['titolo_aspo'] != 'None'):\n",
-    "            line = triple(datiniCoords.prefix + row['id'] + informationObjectCoords.code, identifiedByCoords.prefix, datiniCoords.prefix + row['id'] + titleCoords.code) + closeLine\n",
-    "            output.write(line)\n",
-    "            line = triple(datiniCoords.prefix + row['id'] + titleCoords.code, hasTypeCoords.prefix, titleCoords.prefix) + closeLine\n",
-    "            output.write(line)\n",
-    "            line = triple(datiniCoords.prefix + row['id'] + titleCoords.code, labelCoords.prefix, '\\\"' + row['titolo_aspo'].replace('\\\\','\\\\\\\\').replace('\"','\\\\\"')+ '\\\"') + closeLine\n",
-    "            output.write(line)\n",
-    "        #           \n",
-    "        output.write('\\n')\n",
-    "        #\n",
-    "        #\n",
-    "        # Limit number of entries processed (if desired)\n",
-    "        if(ii>max_entries):\n",
-    "            break\n",
-    "        "
-   ]
-  }
- ],
- "metadata": {
-  "kernelspec": {
-   "name": "python373jvsc74a57bd031f2aee4e71d21fbe5cf8b01ff0e069b9275f58929596ceb00d14d90e3e16cd6",
-   "display_name": "Python 3.7.3 64-bit"
-  },
-  "language_info": {
-   "codemirror_mode": {
-    "name": "ipython",
-    "version": 3
-   },
-   "file_extension": ".py",
-   "mimetype": "text/x-python",
-   "name": "python",
-   "nbconvert_exporter": "python",
-   "pygments_lexer": "ipython3",
-   "version": "3.7.3"
-  },
-  "metadata": {
-   "interpreter": {
-    "hash": "31f2aee4e71d21fbe5cf8b01ff0e069b9275f58929596ceb00d14d90e3e16cd6"
-   }
-  }
- },
- "nbformat": 4,
- "nbformat_minor": 2
-}