From 9991806ec1e8a108e8f141d030e2bd613fc478f1 Mon Sep 17 00:00:00 2001 From: JaseMK Date: Tue, 26 Sep 2023 10:08:22 +0100 Subject: [PATCH] update submodules --- .../ecosystem/.github/initialize.md | 7 + content/_spice-h2020/ecosystem/README.md | 83 ++++ content/_spice-h2020/ecosystem/VERSIONING.md | 79 ++++ .../ecosystem/_data/collections.md | 20 + ...12-05-22-Vivamus-porttitor-porta-tortor.md | 12 + .../2013-05-22-Nulla-vel-risus-dapibus.md | 12 + ...2013-06-22-Cum-sociis-natoque-penatibus.md | 12 + ...4-06-22-Maecenas-feugiat-fringilla-nibh.md | 12 + .../2014-07-22-Lorem-ipsum-dolor-sit-amet.md | 12 + .../_posts/2014-08-22-jekyll-clean-theme.md | 197 +++++++++ .../Polifonia-Corpus-Web-API/README.md | 82 ++++ .../Polifonia-Corpus/README.md | 228 ++++++++++ .../Polifonia-Corpus/annotations/README.md | 159 +++++++ .../Polifonia-Corpus/interrogation/README.md | 133 ++++++ .../README.md | 8 + .../ecosystem/broadcast-concerts-docs-blog.md | 21 + .../broadcast-concerts-docs-melody.md | 20 + .../broadcast-concerts-sparql-endpoint.md | 23 + .../ecosystem/broadcast-concerts.md | 62 +++ .../_polifonia-project/clef/LICENSE.md | 13 + .../content/_polifonia-project/clef/README.md | 65 +++ .../cometa-ontology/README.md | 107 +++++ .../cometa-ontology/header.md | 46 ++ .../_polifonia-project/dashboard/LICENSE.md | 13 + .../_polifonia-project/dashboard/README.md | 93 +++++ .../deep-listening/README.md | 29 ++ .../documentary-evidence-benchmark/README.md | 97 +++++ .../childKGC.md | 1 + .../external-components/README.md | 14 + .../components/endpoints/data-open.md | 49 +++ .../external-components/components/led.md | 36 ++ .../components/midi2vec.md | 39 ++ .../components/neuma-search-engine.md | 41 ++ .../external-components/components/ramose.md | 35 ++ .../sparql-anything/command-line.md | 114 +++++ .../components/sparql-anything/docker.md | 55 +++ .../sparql-anything/documentation.md | 27 ++ .../sparql-anything/fuseki-server.md | 40 ++ .../sparql-anything/java-source-code.md | 
37 ++ .../sparql-anything/python-library.md | 23 + .../sparql-anything/requirements.md | 40 ++ .../sparql-anything/sparql-anything.md | 53 +++ .../components/sparql-anything/tutorials.md | 33 ++ .../folk_ngram_analysis/FoNN/LICENSE.md | 395 ++++++++++++++++++ .../FoNN/mtc_ann_corpus/notes.md | 12 + .../FoNN/thesession_corpus/notes.md | 12 + .../folk_ngram_analysis/README.md | 184 ++++++++ .../folk_ngram_analysis/cre_corpus/LICENSE.md | 24 ++ .../folk_ngram_analysis/cre_corpus/README.md | 94 +++++ .../root_note_detection/README.md | 66 +++ .../instrument-ontology/README.md | 88 ++++ .../instrument-ontology/header.md | 62 +++ .../_polifonia-project/lharp/README.md | 109 +++++ .../_polifonia-project/lharp/setup/README.md | 1 + .../lharp/similarities/README.md | 28 ++ .../sonification/full-tracks/audio/README.md | 5 + .../lharp/sonification/soundfonts/README.md | 1 + .../content/_polifonia-project/licences/KG.md | 32 ++ .../_polifonia-project/licences/Pipeline.md | 62 +++ .../_polifonia-project/licences/README.md | 8 + .../meetups-application/README.md | 3 + .../meetups-application.md | 35 ++ .../meetups-application/website/README.md | 65 +++ .../meetups-knowledge-graph/README.md | 105 +++++ .../meetups-ontology/README.md | 112 +++++ .../meetups-ui-design/README.md | 4 + .../meetups-ui-design/meetups-ui-design.md | 41 ++ .../meetups_corpus_collection/README.md | 75 ++++ .../meetups_pilot/README.md | 54 +++ .../meetups_pilot/README_coreference.md | 59 +++ .../meetups_pilot/README_data_cleaning.md | 69 +++ .../meetups_pilot/README_hm-identification.md | 65 +++ .../README_identification_themes.md | 86 ++++ .../README_people_places_identification.md | 80 ++++ .../meetups_pilot/README_time_expressions.md | 97 +++++ .../music-analysis-ontology/README.md | 13 + .../music-meta-ontology/README.md | 97 +++++ .../assets/ecosystem/meta_ontology_header.md | 73 ++++ .../assets/ecosystem/pymusicmeta_header.md | 51 +++ .../music-meta-ontology/website/README.md | 3 + 
.../music-meta-ontology/website/docs/intro.md | 3 + .../website/src/pages/markdown-page.md | 7 + .../musicbo-knowledge-graph/README.md | 63 +++ .../ontology-network/README.md | 61 +++ .../ontology-network/checklist.md | 37 ++ .../ontology-network/header.md | 74 ++++ .../ontology-network/website/README.md | 3 + .../website/blog/2023-04-10-welcome/index.md | 10 + .../website/blog/2023-04-19-evaluation.md | 11 + .../website/docs/bells/intro.md | 13 + .../website/docs/cometa/intro.md | 28 ++ .../website/docs/diagrams_howto.md | 12 + .../ontology-network/website/docs/intro.md | 8 + .../website/docs/music-algorithm/intro.md | 109 +++++ .../website/docs/music-instrument/intro.md | 18 + .../docs/music-meta/advanced/extra-a.md | 13 + .../docs/music-meta/advanced/extra-b.md | 13 + .../getting-started/congratulations.md | 19 + .../getting-started/create-an-artist.md | 15 + .../getting-started/create-music.md | 31 ++ .../getting-started/links_provenance.md | 16 + .../getting-started/music-performance.md | 23 + .../website/docs/music-meta/intro.md | 38 ++ .../website/docs/music-projection/intro.md | 19 + .../docs/music-representation/intro.md | 15 + .../website/src/pages/markdown-page.md | 7 + .../P2KG-Pipeline/readme.md | 99 +++++ .../patterns-knowledge-graph/README.md | 99 +++++ .../RDF/README.md | 26 ++ .../_polifonia-project/pitchcontext/README.md | 56 +++ .../polifonia-lexicon/README.md | 27 ++ .../registry_app/LICENSE.md | 13 + .../_polifonia-project/registry_app/README.md | 10 + .../registry_app/ecosystem/container.md | 17 + .../registry_app/ecosystem/dataset.md | 42 ++ .../registry_app/ecosystem/interface.md | 37 ++ .../_polifonia-project/rulebook/CHAMPIONS.md | 65 +++ .../_polifonia-project/rulebook/README.md | 148 +++++++ .../rulebook/deliverable_guidelines.md | 158 +++++++ ...KG-development-documentation-guidelines.md | 85 ++++ .../_polifonia-project/rulebook/schema.md | 136 ++++++ .../source-ontology/README.md | 43 ++ .../source-ontology/header.md | 58 +++ 
.../stories/.github/ISSUE_TEMPLATE/story.md | 35 ++ .../Amy_Organologist/Amy#1_OrganTrends.md | 44 ++ .../Amy_Organologist/Amy#2_OrganBuilders.md | 42 ++ .../stories/Amy_Organologist/readme.md | 36 ++ .../Andrea#1_Serendipity.md | 58 +++ .../stories/Andrea_Theology_Scholar/readme.md | 38 ++ .../Anna#1_HearingMusic.md | 41 ++ .../stories/Anna_Hearing-impaired/readme.md | 31 ++ .../Brendan#1_FindTraditionalMusic.md | 52 +++ .../Brendan_Traditional_Musician/readme.md | 32 ++ .../Carolina#1_SourcesCrossAnalysis.md | 86 ++++ .../Carolina_Music_Historian/readme.md | 41 ++ .../David#1_MusicHistorian.md | 72 ++++ .../stories/David_Music_Historian/readme.md | 29 ++ .../Frank_Organist/Frank#1_OrganKnowledge.md | 50 +++ .../stories/Frank_Organist/readme.md | 35 ++ .../Jorge#1_OrganizeMyLibrary.md | 65 +++ .../Jorge#2_FindSimilarScores.md | 64 +++ .../Jorge#3_DynamicExploration.md | 48 +++ .../Jorge_Librarian/Jorge#4_LinkToSources.md | 56 +++ .../stories/Jorge_Librarian/readme.md | 38 ++ .../Keith#1_MusicConnections.md | 53 +++ .../stories/Keith_Music_Producer/readme.md | 33 ++ .../Keoma#1_RestorationAndSoundPractices.md | 167 ++++++++ .../stories/Keoma_Architect/readme.md | 36 ++ .../_polifonia-project/stories/LICENSE.md | 96 +++++ .../Laurent#1_MusicArchives.md | 54 +++ .../Laurent_Music_Journalist/readme.md | 33 ++ .../Linka#1_MusicKnowledge.md | 102 +++++ .../Linka_Computer_Scientist/readme.md | 33 ++ .../Mark#1_FolkMusic.md | 157 +++++++ .../Mark_Computational_Musicologist/readme.md | 33 ++ .../Ortenz#1_MusicAndChildhood.md | 72 ++++ .../Ortenz#2_MusicalSocialNetwork.md | 78 ++++ .../stories/Ortenz_Music_Historian/readme.md | 38 ++ ...ia#1_IdentificationOfIntangibleElements.md | 49 +++ .../Patrizia_Ethnoanthropologist/readme.md | 35 ++ .../Paul#1_OrganComparison.md | 166 ++++++++ .../Paul#2_ResourceReliability.md | 43 ++ .../stories/Paul_Organ_Advisor/readme.md | 33 ++ .../_polifonia-project/stories/README.md | 98 +++++ .../Ralph#1_WordsAndMusic.md | 41 ++ 
.../stories/Ralph_Music_Historian/readme.md | 33 ++ ...1_ConflictingTheoreticalInterpretations.md | 112 +++++ .../Sethus#2_CreateRelevantCorpus.md | 67 +++ ...thus#3_ConflictingAnalyticalAnnotations.md | 156 +++++++ .../stories/Sethus_Music_Theorist/readme.md | 36 ++ .../Sonia#1_ExplorationMode.md | 52 +++ .../Sonia#2_ShuffleMode.md | 50 +++ .../Sonia_Playlist_User/Sonia#3_StatsMode.md | 63 +++ .../stories/Sonia_Playlist_User/readme.md | 35 ++ .../Sophia#1_MusiciansAndTheirEnvironment.md | 61 +++ .../Sophia#2_OriginsAndForm.md | 77 ++++ .../Sophia#3_Reorchestration.md | 59 +++ .../stories/Sophia_Musicologist/readme.md | 43 ++ .../Valeriana#1_DiscourseAnalysis.md | 44 ++ .../Valeriana#2_Terminology.md | 50 +++ .../stories/Valeriana_Linguist/readme.md | 33 ++ .../William#1EuropeanFolkMusic.md | 73 ++++ .../William_Curator_Europeana/readme.md | 31 ++ .../textual-corpus-population/README.md | 294 +++++++++++++ .../ocreval/CHANGELOG.md | 66 +++ .../ocreval/README.md | 119 ++++++ .../tonalities_pilot/README.md | 1 + .../tonalities_pilot/Tonalities.md | 36 ++ .../tonalities_pilot/mockup/changeLog.md | 5 + .../tonalities_pilot/mockup/mockup.md | 50 +++ .../tunes-ontology/README.md | 67 +++ .../tunes-ontology/header.md | 46 ++ .../_polifonia-project/web_portal/README.md | 32 ++ .../web_portal/analysis/data_layer.md | 42 ++ .../web_portal/apis/README.md | 17 + .../content/_smashub/choco/LICENSE.md | 1 + .../content/_smashub/choco/README.md | 249 +++++++++++ .../content/_smashub/choco/assets/header.md | 131 ++++++ .../biab-internet-corpus/raw/README.md | 1 + .../choco/partitions/chordify/readme.md | 142 +++++++ .../raw/harmonies/README.md | 37 ++ .../Boulanger,_Lili/_/Attente/README.md | 19 + .../content/_smashub/harmory/README.md | 151 +++++++ .../content/_smashub/harmory/assets/header.md | 49 +++ content/_spice-h2020/ecosystem/index.md | 142 +++++++ .../ecosystem/pages/_external-components.md | 17 + .../_spice-h2020/ecosystem/pages/_stories.md | 24 ++ 
content/_spice-h2020/ecosystem/pages/_tags.md | 36 ++ content/_spice-h2020/ecosystem/pages/data.md | 61 +++ .../_spice-h2020/ecosystem/pages/licences.md | 67 +++ .../_spice-h2020/ecosystem/pages/personas.md | 29 ++ .../ecosystem/pages/pilots/access.md | 26 ++ .../ecosystem/pages/pilots/bells.md | 24 ++ .../ecosystem/pages/pilots/child.md | 23 + .../ecosystem/pages/pilots/facets.md | 23 + .../ecosystem/pages/pilots/index.md | 19 + .../ecosystem/pages/pilots/interlink.md | 23 + .../ecosystem/pages/pilots/meetups.md | 23 + .../ecosystem/pages/pilots/musicbo.md | 23 + .../ecosystem/pages/pilots/organs.md | 24 ++ .../ecosystem/pages/pilots/tonalities.md | 23 + .../ecosystem/pages/pilots/tunes.md | 23 + .../_spice-h2020/ecosystem/pages/report.md | 60 +++ .../_spice-h2020/ecosystem/pages/rulebook.md | 131 ++++++ .../_spice-h2020/ecosystem/pages/software.md | 68 +++ .../ecosystem/pages/work-packages/index.md | 22 + .../ecosystem/pages/work-packages/wp1.md | 31 ++ .../ecosystem/pages/work-packages/wp2.md | 23 + .../ecosystem/pages/work-packages/wp3.md | 22 + .../ecosystem/pages/work-packages/wp4.md | 22 + .../ecosystem/pages/work-packages/wp5.md | 22 + .../ecosystem/pages/work-packages/wp6.md | 22 + .../ecosystem/pages/work-packages/wp7.md | 23 + .../ecosystem/pages/work-packages/wp8.md | 23 + 234 files changed, 12609 insertions(+) create mode 100644 content/_spice-h2020/ecosystem/.github/initialize.md create mode 100644 content/_spice-h2020/ecosystem/README.md create mode 100644 content/_spice-h2020/ecosystem/VERSIONING.md create mode 100644 content/_spice-h2020/ecosystem/_data/collections.md create mode 100644 content/_spice-h2020/ecosystem/_posts/2012-05-22-Vivamus-porttitor-porta-tortor.md create mode 100644 content/_spice-h2020/ecosystem/_posts/2013-05-22-Nulla-vel-risus-dapibus.md create mode 100644 content/_spice-h2020/ecosystem/_posts/2013-06-22-Cum-sociis-natoque-penatibus.md create mode 100644 
content/_spice-h2020/ecosystem/_posts/2014-06-22-Maecenas-feugiat-fringilla-nibh.md create mode 100644 content/_spice-h2020/ecosystem/_posts/2014-07-22-Lorem-ipsum-dolor-sit-amet.md create mode 100644 content/_spice-h2020/ecosystem/_posts/2014-08-22-jekyll-clean-theme.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/Polifonia-Corpus-Web-API/README.md create mode 100755 content/_spice-h2020/ecosystem/content/_polifonia-project/Polifonia-Corpus/README.md create mode 100755 content/_spice-h2020/ecosystem/content/_polifonia-project/Polifonia-Corpus/annotations/README.md create mode 100755 content/_spice-h2020/ecosystem/content/_polifonia-project/Polifonia-Corpus/interrogation/README.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/broadcast-concerts-knowledge-graph/README.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/broadcast-concerts-knowledge-graph/ecosystem/broadcast-concerts-docs-blog.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/broadcast-concerts-knowledge-graph/ecosystem/broadcast-concerts-docs-melody.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/broadcast-concerts-knowledge-graph/ecosystem/broadcast-concerts-sparql-endpoint.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/broadcast-concerts-knowledge-graph/ecosystem/broadcast-concerts.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/clef/LICENSE.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/clef/README.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/cometa-ontology/README.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/cometa-ontology/header.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/dashboard/LICENSE.md create mode 100644 
content/_spice-h2020/ecosystem/content/_polifonia-project/dashboard/README.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/deep-listening/README.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/documentary-evidence-benchmark/README.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/documentary-evidence-benchmark/childKGC.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/external-components/README.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/external-components/components/endpoints/data-open.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/external-components/components/led.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/external-components/components/midi2vec.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/external-components/components/neuma-search-engine.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/external-components/components/ramose.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/external-components/components/sparql-anything/command-line.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/external-components/components/sparql-anything/docker.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/external-components/components/sparql-anything/documentation.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/external-components/components/sparql-anything/fuseki-server.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/external-components/components/sparql-anything/java-source-code.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/external-components/components/sparql-anything/python-library.md create mode 
100644 content/_spice-h2020/ecosystem/content/_polifonia-project/external-components/components/sparql-anything/requirements.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/external-components/components/sparql-anything/sparql-anything.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/external-components/components/sparql-anything/tutorials.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/folk_ngram_analysis/FoNN/LICENSE.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/folk_ngram_analysis/FoNN/mtc_ann_corpus/notes.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/folk_ngram_analysis/FoNN/thesession_corpus/notes.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/folk_ngram_analysis/README.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/folk_ngram_analysis/cre_corpus/LICENSE.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/folk_ngram_analysis/cre_corpus/README.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/folk_ngram_analysis/root_note_detection/README.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/instrument-ontology/README.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/instrument-ontology/header.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/lharp/README.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/lharp/setup/README.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/lharp/similarities/README.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/lharp/sonification/full-tracks/audio/README.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/lharp/sonification/soundfonts/README.md 
create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/licences/KG.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/licences/Pipeline.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/licences/README.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/meetups-application/README.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/meetups-application/meetups-application.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/meetups-application/website/README.md create mode 100755 content/_spice-h2020/ecosystem/content/_polifonia-project/meetups-knowledge-graph/README.md create mode 100755 content/_spice-h2020/ecosystem/content/_polifonia-project/meetups-ontology/README.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/meetups-ui-design/README.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/meetups-ui-design/meetups-ui-design.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/meetups_corpus_collection/README.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/meetups_pilot/README.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/meetups_pilot/README_coreference.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/meetups_pilot/README_data_cleaning.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/meetups_pilot/README_hm-identification.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/meetups_pilot/README_identification_themes.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/meetups_pilot/README_people_places_identification.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/meetups_pilot/README_time_expressions.md 
create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/music-analysis-ontology/README.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/music-meta-ontology/README.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/music-meta-ontology/assets/ecosystem/meta_ontology_header.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/music-meta-ontology/assets/ecosystem/pymusicmeta_header.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/music-meta-ontology/website/README.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/music-meta-ontology/website/docs/intro.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/music-meta-ontology/website/src/pages/markdown-page.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/musicbo-knowledge-graph/README.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/ontology-network/README.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/ontology-network/checklist.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/ontology-network/header.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/ontology-network/website/README.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/ontology-network/website/blog/2023-04-10-welcome/index.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/ontology-network/website/blog/2023-04-19-evaluation.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/ontology-network/website/docs/bells/intro.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/ontology-network/website/docs/cometa/intro.md create mode 100644 
content/_spice-h2020/ecosystem/content/_polifonia-project/ontology-network/website/docs/diagrams_howto.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/ontology-network/website/docs/intro.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/ontology-network/website/docs/music-algorithm/intro.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/ontology-network/website/docs/music-instrument/intro.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/ontology-network/website/docs/music-meta/advanced/extra-a.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/ontology-network/website/docs/music-meta/advanced/extra-b.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/ontology-network/website/docs/music-meta/getting-started/congratulations.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/ontology-network/website/docs/music-meta/getting-started/create-an-artist.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/ontology-network/website/docs/music-meta/getting-started/create-music.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/ontology-network/website/docs/music-meta/getting-started/links_provenance.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/ontology-network/website/docs/music-meta/getting-started/music-performance.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/ontology-network/website/docs/music-meta/intro.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/ontology-network/website/docs/music-projection/intro.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/ontology-network/website/docs/music-representation/intro.md create mode 100644 
content/_spice-h2020/ecosystem/content/_polifonia-project/ontology-network/website/src/pages/markdown-page.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/patterns-knowledge-graph/P2KG-Pipeline/readme.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/patterns-knowledge-graph/README.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/patterns-knowledge-graph/patterns-knowledge-graph-datasets/RDF/README.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/pitchcontext/README.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/polifonia-lexicon/README.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/registry_app/LICENSE.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/registry_app/README.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/registry_app/ecosystem/container.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/registry_app/ecosystem/dataset.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/registry_app/ecosystem/interface.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/rulebook/CHAMPIONS.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/rulebook/README.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/rulebook/deliverable_guidelines.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/rulebook/ontology-KG-development-documentation-guidelines.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/rulebook/schema.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/source-ontology/README.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/source-ontology/header.md create mode 
100644 content/_spice-h2020/ecosystem/content/_polifonia-project/stories/.github/ISSUE_TEMPLATE/story.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Amy_Organologist/Amy#1_OrganTrends.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Amy_Organologist/Amy#2_OrganBuilders.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Amy_Organologist/readme.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Andrea_Theology_Scholar/Andrea#1_Serendipity.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Andrea_Theology_Scholar/readme.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Anna_Hearing-impaired/Anna#1_HearingMusic.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Anna_Hearing-impaired/readme.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Brendan_Traditional_Musician/Brendan#1_FindTraditionalMusic.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Brendan_Traditional_Musician/readme.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Carolina_Music_Historian/Carolina#1_SourcesCrossAnalysis.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Carolina_Music_Historian/readme.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/stories/David_Music_Historian/David#1_MusicHistorian.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/stories/David_Music_Historian/readme.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Frank_Organist/Frank#1_OrganKnowledge.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Frank_Organist/readme.md create mode 
100644 content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Jorge_Librarian/Jorge#1_OrganizeMyLibrary.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Jorge_Librarian/Jorge#2_FindSimilarScores.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Jorge_Librarian/Jorge#3_DynamicExploration.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Jorge_Librarian/Jorge#4_LinkToSources.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Jorge_Librarian/readme.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Keith_Music_Producer/Keith#1_MusicConnections.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Keith_Music_Producer/readme.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Keoma_Architect/Keoma#1_RestorationAndSoundPractices.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Keoma_Architect/readme.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/stories/LICENSE.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Laurent_Music_Journalist/Laurent#1_MusicArchives.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Laurent_Music_Journalist/readme.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Linka_Computer_Scientist/Linka#1_MusicKnowledge.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Linka_Computer_Scientist/readme.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Mark_Computational_Musicologist/Mark#1_FolkMusic.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Mark_Computational_Musicologist/readme.md create 
mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Ortenz_Music_Historian/Ortenz#1_MusicAndChildhood.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Ortenz_Music_Historian/Ortenz#2_MusicalSocialNetwork.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Ortenz_Music_Historian/readme.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Patrizia_Ethnoanthropologist/Patrizia#1_IdentificationOfIntangibleElements.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Patrizia_Ethnoanthropologist/readme.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Paul_Organ_Advisor/Paul#1_OrganComparison.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Paul_Organ_Advisor/Paul#2_ResourceReliability.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Paul_Organ_Advisor/readme.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/stories/README.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Ralph_Music_Historian/Ralph#1_WordsAndMusic.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Ralph_Music_Historian/readme.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Sethus_Music_Theorist/Sethus#1_ConflictingTheoreticalInterpretations.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Sethus_Music_Theorist/Sethus#2_CreateRelevantCorpus.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Sethus_Music_Theorist/Sethus#3_ConflictingAnalyticalAnnotations.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Sethus_Music_Theorist/readme.md create mode 100644 
content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Sonia_Playlist_User/Sonia#1_ExplorationMode.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Sonia_Playlist_User/Sonia#2_ShuffleMode.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Sonia_Playlist_User/Sonia#3_StatsMode.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Sonia_Playlist_User/readme.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Sophia_Musicologist/Sophia#1_MusiciansAndTheirEnvironment.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Sophia_Musicologist/Sophia#2_OriginsAndForm.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Sophia_Musicologist/Sophia#3_Reorchestration.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Sophia_Musicologist/readme.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Valeriana_Linguist/Valeriana#1_DiscourseAnalysis.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Valeriana_Linguist/Valeriana#2_Terminology.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Valeriana_Linguist/readme.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/stories/William_Curator_Europeana/William#1EuropeanFolkMusic.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/stories/William_Curator_Europeana/readme.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/textual-corpus-population/README.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/textual-corpus-population/ocreval/CHANGELOG.md create mode 100644 
content/_spice-h2020/ecosystem/content/_polifonia-project/textual-corpus-population/ocreval/README.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/tonalities_pilot/README.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/tonalities_pilot/Tonalities.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/tonalities_pilot/mockup/changeLog.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/tonalities_pilot/mockup/mockup.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/tunes-ontology/README.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/tunes-ontology/header.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/web_portal/README.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/web_portal/analysis/data_layer.md create mode 100644 content/_spice-h2020/ecosystem/content/_polifonia-project/web_portal/apis/README.md create mode 100644 content/_spice-h2020/ecosystem/content/_smashub/choco/LICENSE.md create mode 100644 content/_spice-h2020/ecosystem/content/_smashub/choco/README.md create mode 100644 content/_spice-h2020/ecosystem/content/_smashub/choco/assets/header.md create mode 100644 content/_spice-h2020/ecosystem/content/_smashub/choco/partitions/biab-internet-corpus/raw/README.md create mode 100644 content/_spice-h2020/ecosystem/content/_smashub/choco/partitions/chordify/readme.md create mode 100644 content/_spice-h2020/ecosystem/content/_smashub/choco/partitions/mozart-piano-sonatas/raw/harmonies/README.md create mode 100644 content/_spice-h2020/ecosystem/content/_smashub/choco/partitions/when-in-rome/raw/OpenScore-LiederCorpus/Boulanger,_Lili/_/Attente/README.md create mode 100644 content/_spice-h2020/ecosystem/content/_smashub/harmory/README.md create mode 100644 content/_spice-h2020/ecosystem/content/_smashub/harmory/assets/header.md 
create mode 100644 content/_spice-h2020/ecosystem/index.md create mode 100644 content/_spice-h2020/ecosystem/pages/_external-components.md create mode 100644 content/_spice-h2020/ecosystem/pages/_stories.md create mode 100644 content/_spice-h2020/ecosystem/pages/_tags.md create mode 100644 content/_spice-h2020/ecosystem/pages/data.md create mode 100644 content/_spice-h2020/ecosystem/pages/licences.md create mode 100644 content/_spice-h2020/ecosystem/pages/personas.md create mode 100644 content/_spice-h2020/ecosystem/pages/pilots/access.md create mode 100644 content/_spice-h2020/ecosystem/pages/pilots/bells.md create mode 100644 content/_spice-h2020/ecosystem/pages/pilots/child.md create mode 100644 content/_spice-h2020/ecosystem/pages/pilots/facets.md create mode 100644 content/_spice-h2020/ecosystem/pages/pilots/index.md create mode 100644 content/_spice-h2020/ecosystem/pages/pilots/interlink.md create mode 100644 content/_spice-h2020/ecosystem/pages/pilots/meetups.md create mode 100644 content/_spice-h2020/ecosystem/pages/pilots/musicbo.md create mode 100644 content/_spice-h2020/ecosystem/pages/pilots/organs.md create mode 100644 content/_spice-h2020/ecosystem/pages/pilots/tonalities.md create mode 100644 content/_spice-h2020/ecosystem/pages/pilots/tunes.md create mode 100644 content/_spice-h2020/ecosystem/pages/report.md create mode 100644 content/_spice-h2020/ecosystem/pages/rulebook.md create mode 100644 content/_spice-h2020/ecosystem/pages/software.md create mode 100644 content/_spice-h2020/ecosystem/pages/work-packages/index.md create mode 100644 content/_spice-h2020/ecosystem/pages/work-packages/wp1.md create mode 100644 content/_spice-h2020/ecosystem/pages/work-packages/wp2.md create mode 100644 content/_spice-h2020/ecosystem/pages/work-packages/wp3.md create mode 100644 content/_spice-h2020/ecosystem/pages/work-packages/wp4.md create mode 100644 content/_spice-h2020/ecosystem/pages/work-packages/wp5.md create mode 100644 
content/_spice-h2020/ecosystem/pages/work-packages/wp6.md create mode 100644 content/_spice-h2020/ecosystem/pages/work-packages/wp7.md create mode 100644 content/_spice-h2020/ecosystem/pages/work-packages/wp8.md diff --git a/content/_spice-h2020/ecosystem/.github/initialize.md b/content/_spice-h2020/ecosystem/.github/initialize.md new file mode 100644 index 00000000..576b472b --- /dev/null +++ b/content/_spice-h2020/ecosystem/.github/initialize.md @@ -0,0 +1,7 @@ +--- +title: missing repositories.txt +labels: bug +--- +Create a file called `repositories.txt` in the main folder of the repo. + +Include the full path of repositories that you want to monitor (each on a separate line). diff --git a/content/_spice-h2020/ecosystem/README.md b/content/_spice-h2020/ecosystem/README.md new file mode 100644 index 00000000..d9c5bdf8 --- /dev/null +++ b/content/_spice-h2020/ecosystem/README.md @@ -0,0 +1,83 @@ +# Polifonia Ecosystem + +This project is under development. See +https://polifonia-project.github.io/ecosystem/ for the live version of this website. + +## Contributing + +The website is built with [Jekyll](https://jekyllrb.com/) and hosted on [Github +pages](https://pages.github.com/). It means that one needs to write the code on +their machine in a git repository and then push it on Github. Then, Github will +render the content as browsable static pages (with CSS/JS). + +### Packages + +Jekyll is written in Ruby and Ruby packages are managed with `gem`. So, as [this +page](https://jekyllrb.com/docs/installation/) states, you will need to have +Ruby and Gem installed on your computer. + +[Bundler](https://bundler.io/) is also a recommended gem (required?) to work +with Jekyll. 
+ +### Setup Jekyll + + + +Install jekyll and bundler with: + +```bash +gem install jekyll bundler +``` + +Go into the directory and run the following to grab the dependencies (it may be +rather long): + +```bash +bundle install +``` + +Then, you should be able to run + +``` +bundle exec jekyll serve --config=_config.yml,_config-local.yml --livereload +``` + +to have Jekyll `serve` a version of this website (you don't need any other web +server such as Apache or Nginx). A small message should tell you the state of +the rendering process and then invite you to open +[http://127.0.0.1:4000/ecosystem/](http://127.0.0.1:4000/ecosystem/) in your browser. + +You may leave this terminal open and start hacking the code with your tool of +choice, it will detect the changes and update automatically (a simple refresh of +the page in the browser is needed though). + +After a satisfactory version is obtained, don't forget to commit (`git add `, +`git commit -m""`) your changes and push them on Github (`git push`), so that +the world may see them. Obviously, try to not commit a broken version (ie: which does not +compile with Jekyll). + +Then, clone the code from this website in a folder on your computer: +```bash +git clone git@github.com:polifonia-project/ecosystem.git + +``` + +### Setup REECO + + +Configure `repositories.txt` with the info about where to extract reeco annotations. This is an example: +```bash +polifonia-project/rulebook:branches:main +polifonia-project/external-components:branches:main +polifonia-project/folk_ngram_analysis:branches:master +polifonia-project/clef:branches:master +polifonia-project/registry_app:branches:main +polifonia-project/lharp:branches:master +``` + + + +## Need help with this procedure? + +Open an issue and ask for help! 
+ diff --git a/content/_spice-h2020/ecosystem/VERSIONING.md b/content/_spice-h2020/ecosystem/VERSIONING.md new file mode 100644 index 00000000..d49e0c10 --- /dev/null +++ b/content/_spice-h2020/ecosystem/VERSIONING.md @@ -0,0 +1,79 @@ +# Changelog + +## v2.0-RC1 + +Repositories included in this version: + +- polifonia-project/rulebook +- polifonia-project/external-components +- polifonia-project/folk_ngram_analysis +- polifonia-project/clef +- polifonia-project/registry_app +- polifonia-project/lharp +- polifonia-project/documentary-evidence-benchmark +- polifonia-project/ontology-network +- polifonia-project/stories +- polifonia-project/textual-corpus-population +- polifonia-project/polifonia-lexicon +- polifonia-project/dashboard +- polifonia-project/web_portal +- polifonia-project/licences +- polifonia-project/Polifonia-Corpus-Web-API +- polifonia-project/music-analysis-ontology +- polifonia-project/tonalities_pilot +- polifonia-project/meetups-knowledge-graph +- polifonia-project/meetups_pilot +- polifonia-project/meetups_corpus_collection +- polifonia-project/meetups-ontology +- polifonia-project/meetups-application +- polifonia-project/meetups-ui-design +- polifonia-project/ontology-network +- polifonia-project/music-meta-ontology +- polifonia-project/instrument-ontology +- polifonia-project/source-ontology +- polifonia-project/tunes-ontology +- polifonia-project/cometa-ontology +- polifonia-project/deep-listening +- polifonia-project/broadcast-concerts-knowledge-graph +- polifonia-project/musicbo-knowledge-graph +- polifonia-project/Polifonia-Corpus +- smashub/choco +- smashub/harmory +- polifonia-project/pitchcontext +- polifonia-project/patterns-knowledge-graph + +## v1.1 + +Repositories versions: + +- rulebook: v1.0 (tag) +- external-components: v1.0 (tag) +- folk\_ngram\_analysis: v0.4-dev.ecosystem-v1.0 (tag) +- clef: v1.0.0 (tag) +- registry_app: v0.1.5 (tag) +- lharp: v1.0.0 (tag) +- documentary-evidence-benchmark: v1.1 (tag) +- ontology-network: v0.1 
(tag) +- stories: v1.0 (tag) +- textual-corpus-population: v0.1.ecosystem-v1.0 (tag) +- meetups_pilot: v0.1 (tag) +- meetups\_corpus\_collection: v1.0 (tag) +- polifonia-lexicon: v1_0 (tag) +- dashboard: v0.1.1 (tag) +- web_portal: v0.0.1 (tag) + +## v1.0 + +Repositories versions: + +- rulebook: v1.0 (tag) +- external-components: v1.0 (tag) +- folk\_ngram\_analysis: v0.4-dev.ecosystem-v1.0 (tag) +- clef: v1.0.0 (tag) +- registry\_app: v0.1.5 (tag) +- lharp: v1.0.0 (tag) +- documentary-evidence-benchmark: v1.0 (tag) +- ontology-network: v0.1 (tag) +- stories: v1.0 (tag) +- textual-corpus-population: v0.1.ecosystem-v1.0 + diff --git a/content/_spice-h2020/ecosystem/_data/collections.md b/content/_spice-h2020/ecosystem/_data/collections.md new file mode 100644 index 00000000..8315d6d4 --- /dev/null +++ b/content/_spice-h2020/ecosystem/_data/collections.md @@ -0,0 +1,20 @@ +--- +layout: default +title: Collections (dev) +nav_order: 9 +permalink: /collections.html +--- + +# Collections + +
    +{% for coll in site.collections %} +
  • {{ coll.label }} +
      +{% for d in coll.docs %} +
    • {{ d.id }}
    • +{% endfor %} +
    +
  • +{% endfor %} +
diff --git a/content/_spice-h2020/ecosystem/_posts/2012-05-22-Vivamus-porttitor-porta-tortor.md b/content/_spice-h2020/ecosystem/_posts/2012-05-22-Vivamus-porttitor-porta-tortor.md new file mode 100644 index 00000000..5a5c3cd3 --- /dev/null +++ b/content/_spice-h2020/ecosystem/_posts/2012-05-22-Vivamus-porttitor-porta-tortor.md @@ -0,0 +1,12 @@ +--- +layout: post +title: "Vivamus porttitor porta tortor" +date: 2012-05-22 16:25:06 -0700 +comments: true +--- + +Vivamus porttitor porta tortor at ullamcorper. Proin vel nisi magna. Vivamus vel velit laoreet, malesuada nunc nec, pulvinar velit. Vivamus sollicitudin ex quam, sit amet aliquet velit euismod in. Etiam rutrum augue orci, sed euismod risus vehicula eu. Proin quis velit mattis, sodales erat vitae, condimentum nulla. Vestibulum vitae imperdiet ligula. Etiam venenatis ultrices varius. + +Vestibulum vulputate nulla hendrerit velit tempor, et fringilla urna placerat. In hac habitasse platea dictumst. Vestibulum sit amet molestie tortor, eu posuere magna. Donec rhoncus pharetra urna sed tempor. Aliquam erat volutpat. Nulla facilisi. Nam at ante condimentum, egestas massa sed, auctor orci. Aenean tincidunt, turpis et venenatis finibus, orci urna tempor ex, at mattis nisi nisi varius nisl. Pellentesque diam libero, dignissim vel nisl ut, placerat pharetra felis. + +Curabitur venenatis neque eget odio tempor, vitae condimentum quam aliquam. Duis dui odio, auctor non ultricies nec, mollis nec elit. Pellentesque habitant morbi tristique senectus et netus et malesuada fames ac turpis egestas. Sed eget elit at nisl auctor egestas. Vivamus nec turpis feugiat, dapibus metus et, fermentum lacus. Curabitur at blandit diam, non rutrum nulla. Quisque eget fermentum libero, in bibendum diam. Vestibulum eget porta est, in scelerisque metus. Donec in elit aliquet sapien ultrices tincidunt. 
diff --git a/content/_spice-h2020/ecosystem/_posts/2013-05-22-Nulla-vel-risus-dapibus.md b/content/_spice-h2020/ecosystem/_posts/2013-05-22-Nulla-vel-risus-dapibus.md new file mode 100644 index 00000000..d5cc6a46 --- /dev/null +++ b/content/_spice-h2020/ecosystem/_posts/2013-05-22-Nulla-vel-risus-dapibus.md @@ -0,0 +1,12 @@ +--- +layout: post +title: "Nulla vel risus dapibus" +date: 2013-05-22 16:25:06 -0700 +comments: true +--- + +Nulla vel risus dapibus, fringilla nunc id, interdum magna. Vivamus non diam egestas, bibendum elit sed, condimentum quam. Integer eu ipsum ultrices, tincidunt nibh at, auctor sem. Duis iaculis purus a aliquet gravida. Sed ornare, leo venenatis dignissim condimentum, nibh arcu iaculis dui, nec vulputate ante eros laoreet sem. Fusce dapibus, ante eu blandit tincidunt, odio quam vulputate libero, et interdum tellus lorem eleifend nisi. Nam elementum vel sapien sed rhoncus. Praesent commodo neque odio. Praesent a nisl nec neque laoreet dignissim. Quisque vitae felis a nisl sodales consequat ut ac mi. Etiam varius gravida accumsan. In sed lectus nec ipsum commodo efficitur. Ut vehicula diam eu justo pellentesque, in pulvinar lorem dapibus. Donec ornare metus vitae turpis malesuada, ut aliquet dolor vulputate. Aenean eget ipsum elit. Suspendisse tempor sagittis dictum. + +Vivamus dapibus justo vitae tellus dignissim, non interdum odio egestas. Maecenas tincidunt sem non consequat bibendum. Aliquam cursus, enim sed rutrum porta, nisl tellus ultrices ipsum, vel vestibulum orci tellus sit amet quam. Pellentesque ut viverra lacus. Suspendisse potenti. Ut augue enim, hendrerit sed interdum sed, ullamcorper sit amet ex. Donec at mi at erat hendrerit commodo at non eros. Fusce commodo nec quam at rhoncus. + +Aliquam molestie urna at turpis venenatis, et placerat lorem volutpat. Sed gravida arcu id lectus viverra eleifend. Sed in metus sit amet ante luctus dignissim. Etiam in sodales justo, in iaculis odio. 
Vestibulum accumsan felis vitae cursus pharetra. Nulla congue ipsum est, sed vulputate odio pulvinar id. Maecenas a sollicitudin turpis. diff --git a/content/_spice-h2020/ecosystem/_posts/2013-06-22-Cum-sociis-natoque-penatibus.md b/content/_spice-h2020/ecosystem/_posts/2013-06-22-Cum-sociis-natoque-penatibus.md new file mode 100644 index 00000000..990b2820 --- /dev/null +++ b/content/_spice-h2020/ecosystem/_posts/2013-06-22-Cum-sociis-natoque-penatibus.md @@ -0,0 +1,12 @@ +--- +layout: post +title: "Cum sociis natoque penatibus" +date: 2013-06-22 16:25:06 -0700 +comments: true +--- + +Cum sociis natoque penatibus et magnis dis parturient montes, nascetur ridiculus mus. Phasellus fermentum facilisis velit eu auctor. Maecenas tincidunt, leo tempor bibendum auctor, ligula lorem ultricies tellus, ac auctor lorem libero a sapien. Donec ac suscipit tellus. Quisque vitae placerat lorem. In ullamcorper malesuada risus, eget fringilla lacus dignissim at. Proin faucibus, nibh vel molestie scelerisque, lorem urna tempus lacus, id viverra odio dolor sit amet odio. In hendrerit, orci vel scelerisque luctus, arcu purus aliquet turpis, a bibendum nulla est et dui. + +Praesent pellentesque posuere lectus eget condimentum. Ut vitae nisi diam. Quisque vitae ipsum magna. Aliquam pretium laoreet tortor quis volutpat. Donec congue, nisl nec consequat varius, enim enim consectetur felis, a viverra libero elit in ligula. Cras posuere ipsum vel mi scelerisque, eu interdum velit elementum. Duis eu posuere est. Ut vestibulum urna eu viverra fringilla. Aliquam tempus nisi eros, vitae posuere nulla fermentum in. Praesent et justo eros. Proin eleifend justo vel justo condimentum ullamcorper. Curabitur vel vehicula lectus. Mauris sed ex ac ipsum ultrices bibendum at id tortor. Aenean dictum magna ac nisi posuere euismod. Ut fermentum, nulla quis venenatis varius, risus nulla dictum felis, ut dictum eros libero vitae justo. 
+ +Integer aliquam tellus vel libero eleifend, condimentum euismod odio tincidunt. Vivamus felis ante, faucibus quis urna nec, volutpat pulvinar quam. Cras dictum libero ac augue bibendum, et pretium ex pharetra. Vivamus suscipit et erat id eleifend. Proin vulputate, quam sit amet pretium fermentum, felis neque scelerisque metus, a rhoncus quam nisi sit amet urna. Sed et commodo libero, laoreet rutrum eros. Vivamus tempor, leo eget scelerisque molestie, sapien augue viverra tortor, et semper arcu eros ut elit. Sed pulvinar ipsum in semper facilisis. diff --git a/content/_spice-h2020/ecosystem/_posts/2014-06-22-Maecenas-feugiat-fringilla-nibh.md b/content/_spice-h2020/ecosystem/_posts/2014-06-22-Maecenas-feugiat-fringilla-nibh.md new file mode 100644 index 00000000..7334ff19 --- /dev/null +++ b/content/_spice-h2020/ecosystem/_posts/2014-06-22-Maecenas-feugiat-fringilla-nibh.md @@ -0,0 +1,12 @@ +--- +layout: post +title: "Maecenas feugiat fringilla nibh" +date: 2014-06-22 16:25:06 -0700 +comments: true +--- + +Maecenas feugiat fringilla nibh ut mattis. Sed non metus sit amet mi luctus feugiat in quis sem. Vivamus pulvinar commodo bibendum. Interdum et malesuada fames ac ante ipsum primis in faucibus. Maecenas sapien nulla, eleifend in dolor et, rutrum maximus velit. Lorem ipsum dolor sit amet, consectetur adipiscing elit. Suspendisse tempus malesuada egestas. Phasellus pulvinar vulputate urna in tempor. In vel sapien ullamcorper, condimentum ipsum ut, porttitor turpis. Nam auctor erat sed lectus tempus euismod. Nunc ipsum quam, condimentum ac bibendum sit amet, sodales nec metus. + +Vivamus a ullamcorper ipsum. Donec dictum eleifend massa, rhoncus consequat sem tempor nec. Ut bibendum luctus gravida. Nullam eleifend laoreet quam, nec tincidunt dolor tincidunt non. Aenean vel magna massa. Pellentesque sit amet tincidunt ante. Nulla posuere varius elit eu consequat. Sed vitae tortor scelerisque lacus eleifend condimentum. 
Nulla vitae neque sed lorem sagittis pretium eu nec est. Aliquam in euismod risus, vel vulputate orci. + +Praesent sit amet auctor justo. Suspendisse pretium rutrum vehicula. Cras ut porta urna. Morbi massa odio, eleifend vitae dictum eget, mattis nec metus. Praesent pellentesque metus eu massa pharetra facilisis. Suspendisse potenti. Vestibulum pellentesque pharetra tristique. Nam ut lobortis felis. Proin sit amet consequat ipsum. Curabitur et mattis justo, sit amet feugiat lectus. Sed eleifend eget arcu non pretium. Suspendisse rhoncus erat quis leo laoreet rhoncus. Aliquam quis metus vitae enim cursus sodales. In euismod tortor id odio fermentum, non pretium ipsum iaculis. Quisque consectetur elementum nisi, et posuere odio vulputate ac. Ut consequat, velit eget fermentum ultrices, ligula odio fringilla diam, sit amet accumsan elit velit tempus dolor. diff --git a/content/_spice-h2020/ecosystem/_posts/2014-07-22-Lorem-ipsum-dolor-sit-amet.md b/content/_spice-h2020/ecosystem/_posts/2014-07-22-Lorem-ipsum-dolor-sit-amet.md new file mode 100644 index 00000000..1943f338 --- /dev/null +++ b/content/_spice-h2020/ecosystem/_posts/2014-07-22-Lorem-ipsum-dolor-sit-amet.md @@ -0,0 +1,12 @@ +--- +layout: post +title: "Lorem ipsum dolor sit amet" +date: 2014-07-22 16:25:06 -0700 +comments: true +--- + +Lorem ipsum dolor sit amet, consectetur adipiscing elit. Sed quam metus, commodo sit amet ante a, finibus efficitur lorem. Maecenas egestas purus in tempor volutpat. Sed dapibus tortor nec sem suscipit ullamcorper. Nulla nec lorem lacus. Phasellus condimentum massa quis dolor consequat viverra ut ac magna. Ut a consequat nisi. Vivamus at leo ut turpis convallis lacinia. Curabitur eu placerat quam. Donec ultricies faucibus dui, a tincidunt lorem lobortis condimentum. + +Quisque aliquet consectetur justo sit amet convallis. Nunc vel aliquet ipsum, sit amet elementum justo. Vivamus id magna mi. Cras luctus est vel ipsum sagittis pellentesque. 
Vivamus ante elit, porttitor vitae quam quis, fermentum malesuada risus. Integer nec lectus vel lacus cursus tristique in euismod ipsum. Duis ut varius enim. Cum sociis natoque penatibus et magnis dis parturient montes, nascetur ridiculus mus. Morbi et elit eu tortor lacinia sollicitudin non in lacus. + +Integer at viverra turpis. Duis aliquam mattis sapien tempor eleifend. Mauris nec eleifend risus, quis mollis neque. Vivamus non dapibus justo, vel ornare magna. Nunc in nulla venenatis, imperdiet diam in, accumsan massa. Etiam congue augue ipsum, sit amet rutrum nisi blandit quis. Cras in lectus non lorem auctor consequat a vel sem. Aliquam erat volutpat. Fusce lobortis vel orci vitae ullamcorper. Phasellus id eleifend eros. Mauris vulputate, nisi vel auctor auctor, quam enim tincidunt felis, vitae fermentum odio tortor eget est. Integer ornare blandit lectus a accumsan. diff --git a/content/_spice-h2020/ecosystem/_posts/2014-08-22-jekyll-clean-theme.md b/content/_spice-h2020/ecosystem/_posts/2014-08-22-jekyll-clean-theme.md new file mode 100644 index 00000000..dc8de782 --- /dev/null +++ b/content/_spice-h2020/ecosystem/_posts/2014-08-22-jekyll-clean-theme.md @@ -0,0 +1,197 @@ +--- +layout: post +title: "Jekyll Clean Theme" +date: 2014-08-22 16:25:06 -0700 +comments: false +--- + +* Get it from [github](https://github.com/scotte/jekyll-clean). +* See the [live demo](https://scotte.github.io/jekyll-clean). +* See it [in action on my own blog](https://scotte.org). + +Welcome to the sample post for the Jekyll Clean theme. + +A simple and clean Jekyll theme using [bootstrap](http://getbootstrap.com) +(not to be confused with jekyll-bootstrap) that's easy to modify and very +modular in component and element reuse. + +It uses Disqus for comments and includes Google Analytics support. Both of +these features are disabled by default and can be enabled via \_config.yml. You +can also rip this code out of the templates if you like (footer.html and post.html). 
+The beauty of Jekyll - keep things clean... Jekyll Clean! + +The theme works well on mobile phones, using a collapsable nav bar and hiding the +sidebar. The links pane in the sidebar is available on mobile through the nav menu, +and you can do the same thing for any other sections added to the sidebar. + +Don't forget to occassionally merge against my upstream repository so you can get +the latest changes. Pull requests are encouraged and accepted! + +Installation +============ + +If you don't have a blog already on github, start by cloning this repository. +Best to do that directly on github and then clone that down to your computer. + +If you already do have a blog, You can certainly apply this theme to your existing +blog in place, but then you won't be able to merge as the theme changes. If you +re-apply your blog history on top of this theme's **gh-pages** branch, it's then +easy to update to the latest version of the theme. You also don't want to have to +deal with resolving old conflicts from your existing history, so you may wish to to +push your existing master off to a new branch so you have the old history and start +a new branch with this as the start, merging in your \_posts and other assets (after +git rm'ing the current \_posts. + +Not ideal, but you have to make a choice - either apply it manually or base your +blog off this theme's branch. Either way it will work, and both have their own +pros and cons. + +You can setup an upstream tracking repository like so: + +``` +$ git remote add upstream git@github.com:scotte/jekyll-clean.git +``` +And now when you wish to merge your own branch onto the latest version of the +theme, simply do: + +``` +$ git fetch upstream +$ git merge upstream/gh-pages +``` + +Of course you will have to resolve conflicts for \_config.yml, \_includes/links-list.html, +and \_posts, and so on, but in practice this is pretty simple. + +This is how I maintain my own blog which is based on this theme. 
The old history is +sitting in an **old-master** branch that I can refer to when I need to. + +Running Locally +=============== + +Here's the exact set of packages I need to install on Debian to run jekyll +locally with this theme for testing. + +``` +$ sudo aptitude install ruby ruby-dev rubygems nodejs +$ sudo gem install jekyll jekyll-paginate +``` + +And then it's just a simple matter of running jekyll locally: + +``` +$ jekyll serve --baseurl='' +``` + +Now browse to http://127.0.0.1:4000 + +Using gh-pages +============== + +Running a jekyll site is a bit outside the scope of this doc, but +sometimes it can be a bit confusing how to configure jekyll for +project pages versus user pages, for example. + +To start with, read through +[the documentation here](https://help.github.com/articles/user-organization-and-project-pages/). +This will provide a good overview on how it all works. The git branch and +baseurl (in _config.yml) will change depending on the sort of site deployed. + +When you clone this repository, it's set up for project pages, so the +deployed branch is "gh-pages" and baseurl is configured to 'jekyll-clean', +because that's the name of this project. + +If you plan to deploy this as user pages, the deployed branch is "master" +and baseurl is configured to '' (i.e. empty). + +Comment Systems +=============== + +Jekyll clean supports both [isso](https://posativ.org/isso) and +[disqus](https://disqus.com) comment systems. + +After enabling **comments**, either **isso** or **disquss** must +be configured. Don't try configuring both! + +Isso Comments +============= + +Isso requires running a local server, so is not suitable for hosting +in github pages, for example. Isso is open source and keeps all your +data local, unlike Disqus (who knows exactly what they are doing with +your data). + +In _config.yml you'll need to set **isso** to the fully-qualified URL +if your isso server (this is the value for **data-isso** passed to the +isso JS). 
Make sure **comments** is true. + + +Disqus Comments +=============== + +Getting Disqus to work can be a bit more work than it seems like it should be. +Make sure your Disqus account is correctly configured with the right domain +of your blog and you know your Disqus shortname. + +In _config.yml you'll need to set **disqus** to your Disqus shortname and +make sure **comments** is true. + +Finally, in posts, make sure you have **comments: true** in the YAML front +matter. + +More information on using Disqus with Jekyll is +[documented here](https://help.disqus.com/customer/portal/articles/472138-jekyll-installation-instructions). + +Code Syntax Highlighting +======================== + +To use code syntax highlighting, use the following syntax: + +``` +```python +import random + +# Roll the die +roll = random.randint(1, 20) +print('You rolled a %d.' % roll) +``` #REMOVE +``` + +(Remove #REMOVE from the end of the last line). Which will look like this in +the rendered jekyll output using the default css/syntax.css provided with this +theme (which is the **colorful** theme from [https://github.com/iwootten/jekyll-syntax](https://github.com/iwootten/jekyll-syntax)): + +```python +import random + +# Roll the die +roll = random.randint(1, 20) +print('You rolled a %d.' % roll) +``` + +You can, of course, use any theme you wish, see the jekyll and pygments +documentation for more details. + +License +======= + +The content of this theme is distributed and licensed under a +![License Badge]({{ site.baseurl}}/images/cc_by_88x31.png) +[Creative Commons Attribution 4.0 License](https://creativecommons.org/licenses/by/4.0/legalcode) + + This license lets others distribute, remix, tweak, and build upon your work, + even commercially, as long as they credit you for the original creation. This + is the most accommodating of licenses offered. Recommended for maximum + dissemination and use of licensed materials. 
+ +In other words: you can do anything you want with this theme on any site, just please +provide a link to [the original theme on github](https://github.com/scotte/jekyll-clean) +so I get credit for the original design. Beyond that, have at it! + +This theme includes the following files which are the properties of their +respective owners: + +* js/bootstrap.min.js - [bootstrap](http://getbootstrap.com) +* css/bootstrap.min.css - [bootstrap](http://getbootstrap.com) +* js/jquery.min.js - [jquery](https://jquery.com) +* images/cc_by_88x31.png - [creative commons](https://creativecommons.org) +* css/colorful.css - [iwootten/jekyll-syntax](https://github.com/iwootten/jekyll-syntax) diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/Polifonia-Corpus-Web-API/README.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/Polifonia-Corpus-Web-API/README.md new file mode 100644 index 00000000..5daa1902 --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/Polifonia-Corpus-Web-API/README.md @@ -0,0 +1,82 @@ +--- +component-id: Polifonia-Corpus-Web-API +type: WebApplication +name: Polifonia Corpus Web API +description: Source code of the Polifonia Corpus web application +work-package: +- WP1 +project: polifonia-project +resource: https://polifonia.disi.unibo.it/corpus/ +demo: https://polifonia.disi.unibo.it/corpus/ +release-date: 2023-03-15 +release-number: latest +release link: https://github.com/polifonia-project/Polifonia-Corpus-Web-API/releases/latest +doi: 10.5281/zenodo.7736486 +licence: +- CC0 +contributors: +- Marco Grasso +- Rocco Tripodi +related-components: +- reuses: + - Polifonia-Corpus +--- + + +# Polifonia Corpus Web Application +This repository contains the source code of the web application created for interrogating the Polifonia-Corpus APIs. The designed interactive dashboard has been created to easily access the Polifonia Corpus and carries a user-friendly design based on a music player. 
+ + +[![DOI](https://zenodo.org/badge/577352811.svg)](https://zenodo.org/badge/latestdoi/577352811) + +### Data +This repository is missing db files, which should be grouped in a folder called "annotations". +The annotations folder contains two other folders, one for the database files (annotations/db) and another one for metadata (annotations/metadata). + +``` +Polifonia-Corpus-Web-API +│ README.md +│ app.py +│ requirements.txt +│ .gitignore +│ .gitattributes +└───annotations +│ │ +│ └───db +│ │ Books-EN.db +│ │ Wikipedia-EN.db +│ │ ... +│ └───metadata +│ books_corpus_metadataEN.tsv +│ wikipedia_corpus_metadataEN.tsv +│ ... +└───interrogation +└───static +└───templates + +``` + + +Database file for *pilot* modules are named as follows: +`` Pilots-{Pilot name}-{LANGUAGE}.db `` + +Database file for *other* modules are named as follows: +`` {Module name}-{LANGUAGE}.db `` + + + +### Files + +The db collection includes 3 modules (Books, Wikipedia, Periodicals), which are available in 6 languages each (IT, EN, ES, FR, NL, DE), for a total of 18 files. + +- Books-EN.db +- Wikipedia-EN.db +- Periodicals-EN.db + +The db collection also includes 5 pilot db files, which are only available in their specific language as listed below. + +- Pilots-Bells-IT.db +- Pilots-Child-EN.db +- Pilots-Meetups-EN.db +- Pilots-Musicbo-EN.db +- Pilots-Organs-NL.db diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/Polifonia-Corpus/README.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/Polifonia-Corpus/README.md new file mode 100755 index 00000000..1df4854a --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/Polifonia-Corpus/README.md @@ -0,0 +1,228 @@ +--- +component-id: Polifonia-Corpus +name: Polifonia Corpus +description: Data, metadata, statistics, annotations and interrogation APIs of the Polifonia Textual Corpus. 
+type: Corpus +release-date: 28/06/2022 +release-number: v0.1.3 +work-package: +- WP4 +licence: CC-BY_v4 +links: +- https://github.com/polifonia-project/Polifonia-Corpus +credits: +- https://github.com/roccotrip +- https://github.com/arianna-graciotti +- https://github.com/EleonoraMarzi +--- + +# Polifonia Textual Corpus + +This repository contains the script to access, parse, annotate and interrogate the data and metadata of the Polifonia Textual Corpus. + +The high level structure of the repository is the following: + +``` +Polifonia-Corpus +│ README.md +│ wikipedia_corpus_parser.py +| wikipedia_corpus_reader.py +│ +└───annotations +│ │ README.md +│ │ +│ └───db +│ │ Wikipedia_EN.db +│ │ Periodicals_EN.db +│ │ Books_EN.db +| | ........ +| | "Module"_"Lang".db +│ +└───interrogation +| │ README.md +| │ interrogate.py +| | +| |___data +| | lex_ent_map.pkl +| | pages.pkl +| +|___utils + | db_utils.py +``` + +The root folder contains the script to access and parse the Polifonia Corpus data and metadata that are linked in this README.md file. + +The annotations folder contains a README.md file in which it is explained how the corpus was annotated. A "db" subfolder of the "annotations" folder is set up to store the databases with the annotations of the corpus that will be used for the interrogations of the corpus. The databases will be downloaded automatically the first time each module will be queried. The links for the download are listed in the "urls.csv" file. + +The interrogation folder contains a README.md file that explain how to interrogate the corpus. It contains a "data" subfolder used to link mentions, named entities and Wikipedia page titles. + +## The corpus + +The corpus is dived into four modules: +- the Wikipedia module +- the Books module +- the Periodicals module +- the Polifonia Pilots module + +Each module (except the Pilot module) contains documents in six languages: Dutch (NL), English (EN), French (FR), German (DE),Italian (IT) and Spanish (ES). 
+ +### The Wikipedia module +It was created selecting from **[BabelNet domains](http://lcl.uniroma1.it/babeldomains/)** all the **[Wikipedia](https://www.wikipedia.org)** musical pages. + +#### Metadata +The metadata of the module can be downloaded from: + +| lang | url | +|-------|-----------------------------------------------------------------------------------------------------------| +| DE | [![DOI](https://zenodo.org/badge/DOI/10.5281/zenodo.6671494.svg)](https://doi.org/10.5281/zenodo.6671494) | +| EN | [![DOI](https://zenodo.org/badge/DOI/10.5281/zenodo.6671510.svg)](https://doi.org/10.5281/zenodo.6671510) | +| ES | [![DOI](https://zenodo.org/badge/DOI/10.5281/zenodo.6670984.svg)](https://doi.org/10.5281/zenodo.6670984) | +| FR | [![DOI](https://zenodo.org/badge/DOI/10.5281/zenodo.6671560.svg)](https://doi.org/10.5281/zenodo.6671560) | +| IT | [![DOI](https://zenodo.org/badge/DOI/10.5281/zenodo.6671571.svg)](https://doi.org/10.5281/zenodo.6671571) | +| NL | [![DOI](https://zenodo.org/badge/DOI/10.5281/zenodo.6671519.svg)](https://doi.org/10.5281/zenodo.6671519) | + +#### Data + +The data of the module can be downloaded from: + +| lang | url | +|------|----------------| +| DE | [![DOI](https://zenodo.org/badge/DOI/10.5281/zenodo.6671663.svg)](https://doi.org/10.5281/zenodo.6671663)| +| EN | [![DOI](https://zenodo.org/badge/DOI/10.5281/zenodo.6671681.svg)](https://doi.org/10.5281/zenodo.6671681)| +| ES | [![DOI](https://zenodo.org/badge/DOI/10.5281/zenodo.6671673.svg)](https://doi.org/10.5281/zenodo.6671673)| +| FR | [![DOI](https://zenodo.org/badge/DOI/10.5281/zenodo.6671728.svg)](https://doi.org/10.5281/zenodo.6671728)| +| IT | [![DOI](https://zenodo.org/badge/DOI/10.5281/zenodo.6671734.svg)](https://doi.org/10.5281/zenodo.6671734)| +| NL | [![DOI](https://zenodo.org/badge/DOI/10.5281/zenodo.6671738.svg)](https://doi.org/10.5281/zenodo.6671738)| + + +#### Statistics + +Some statistics of the module are provided below: + +| lang | #documents | #sentences | 
#tokens | #types | #links | entities | +|------|------------|------------|---------|---------|------------|---------| +| DE | 53.986 | 1.459.265 | 44.523.547 | 9.732.779 | 12.561.177 | 2.197.438| +| EN | 250.413 | 7.362.272 | 198.257.649 | 1.191.901 | 54.059.979 | 25.786.043 | +| ES | 57.891 | 1.247.583 | 36.229.557 | 537.465 | 7.171.759 | 2.996.185 | +| FR | 65.970 | 2.901.295 | 82.979.944 | 653.489 | 19.208.818 | 6.212.997 | +| IT | 77.986 | 1.548.981 | 47.497.487 | 491.500 | 14.519.636 | 2.649.949 | +| NL | 36.609 | 1.246.881 | 23.539.528 | 479.962 | 4.716.170 | 2.453.332 | + +### The Books module +It was created using the **[Polifonia Textual Corpus Population](https://github.com/polifonia-project/textual-corpus-population)** module that allows to access different digital libraries (such as **[BNF](https://gallica.bnf.fr)** and **[BNE](http://www.bne.es)**) and to select from them documents related to music. The PTCPM allows also to perform optical character recognition (OCR) on images and PDF files. 
+ +#### Metadata +The metadata of the module can be downloaded from: + +| lang | url | +|------|-----------------------------------------------------------------------------------------------------------| +| DE | [![DOI](https://zenodo.org/badge/DOI/10.5281/zenodo.6772115.svg)](https://doi.org/10.5281/zenodo.6772115) | +| EN | [![DOI](https://zenodo.org/badge/DOI/10.5281/zenodo.6772136.svg)](https://doi.org/10.5281/zenodo.6772136) | +| ES | [![DOI](https://zenodo.org/badge/DOI/10.5281/zenodo.6772131.svg)](https://doi.org/10.5281/zenodo.6772131) | +| FR | [![DOI](https://zenodo.org/badge/DOI/10.5281/zenodo.6772142.svg)](https://doi.org/10.5281/zenodo.6772142) | +| IT | [![DOI](https://zenodo.org/badge/DOI/10.5281/zenodo.6772137.svg)](https://doi.org/10.5281/zenodo.6772137) | +| NL | [![DOI](https://zenodo.org/badge/DOI/10.5281/zenodo.6772145.svg)](https://doi.org/10.5281/zenodo.6772145) | + +#### Data + +The data of the module cannot be downloaded due to copyright issue. +However, it is possible to reconstruct the corpus using the metadata provided in the previous section. +Furthermore, the data processed and annotated can be accessed interrogating the corpus (how to interrogate the corpus is explained in a README.md file inside the interrogation folder of this repository). + + +#### Statistics + +Some statistics of the module are provided below: + +| lang | #documents | #sentences | #types | #tokens | +| ------ |------------|------------|-----------|------------| +| DE | 237 | 38.633 | 121.530 | 489.225 | +| EN | 360 | 49.595 | 185.280 | 940.232 | +| ES | 41.093 | 731.606 | 1.852.430 | 20.180.197 | +| FR | 265 | 633.173 | 1.305.283 | 14.354.611 | +| IT | 12200 | 202.730 | 405.099 | 2.571.090 | +| NL | 83 | 116.593 | 539.102 | 1.779.824 | + +### The Periodicals module +It was created with the help of musicologists that provided the titles of different influencial music periodicals. 
+ +#### Metadata +The metadata of the module can be downloaded from: + +| lang | url | +|------|------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| DE | [![DOI](https://zenodo.org/badge/DOI/10.5281/zenodo.6761779.svg)](https://doi.org/10.5281/zenodo.6761779)| +| EN | [![DOI](https://zenodo.org/badge/DOI/10.5281/zenodo.6671912.svg)](https://doi.org/10.5281/zenodo.6671912)| +| ES | [![DOI](https://zenodo.org/badge/DOI/10.5281/zenodo.6761787.svg)](https://doi.org/10.5281/zenodo.6761787)| +| FR | [![DOI](https://zenodo.org/badge/DOI/10.5281/zenodo.6761794.svg)](https://doi.org/10.5281/zenodo.6761794)| +| IT | [![DOI](https://zenodo.org/badge/DOI/10.5281/zenodo.6761806.svg)](https://doi.org/10.5281/zenodo.6761806)| +| NL | [![DOI](https://zenodo.org/badge/DOI/10.5281/zenodo.6761809.svg)](https://doi.org/10.5281/zenodo.6761809)| + +#### Data + +The data of the module cannot be downloaded due to copyright issue. +However, it is possible to reconstruct the corpus using the metadata provided in the previous section. +Furthermore, the data processed and annotated can be accessed interrogating the corpus (how to interrogate the corpus is explained in a README.md file inside the interrogation folder of this repository). 
+ + +#### Statistics + +Some statistics of the module are provided below: + + +| lang | #documents | #sentences | #types | #tokens | +| ------ |------------|------------|-----------|------------| +| DE | 705 | 121.113 | 544.376 | 2.405.289 | +| EN | 2.868 | 4.400.015 | 7.342.527 | 76.180.398 | +| ES | 455 | 87.025 | 677.041 | 3.170.480 | +| FR | 349 | 329.166 | 696.427 | 5.111.915 | +| IT | 1.251 | 433.465 | 992.902 | 7.879.459 | +| NL | 125 | 187.350 | 716.506 | 3.880.499 | + + +### The Polifonia Pilots module +It was created collecting the textual material selected by five **[Polifonia Pilots](https://polifonia-project.eu/pilots/)**: +- BELLS +- CHILD +- MEETUPS +- MUSICBO +- ORGANS + +#### Metadata +The metadata of the module can be downloaded from: + +| Pilot | url | +|---------|---------| +| BELLS | [![DOI](https://zenodo.org/badge/DOI/10.5281/zenodo.6672061.svg)](https://doi.org/10.5281/zenodo.6672061)| +| CHILD | [![DOI](https://zenodo.org/badge/DOI/10.5281/zenodo.6672093.svg)](https://doi.org/10.5281/zenodo.6672093)| +| MEETUPS | [![DOI](https://zenodo.org/badge/DOI/10.5281/zenodo.6672133.svg)](https://doi.org/10.5281/zenodo.6672133)| +| MUSICBO | [![DOI](https://zenodo.org/badge/DOI/10.5281/zenodo.6672165.svg)](https://doi.org/10.5281/zenodo.6672165)| +| ORGANS | [![DOI](https://zenodo.org/badge/DOI/10.5281/zenodo.6672193.svg)](https://doi.org/10.5281/zenodo.6672193)| + + +#### Data + +The data of the Pilots Module of the Polifonia textual Corpus collected for Bells, MusicBo and Organs pilots cannot be published in their integral form because they are subject to heterogeneous license restrictions. The respective set of published metadata (see table above) allows for the reproduction of the whole corpora. 
Texts collected for Child and Meetups Pilots are royalty-free, therefore we report links to retrieve them from their corresponding GitHub repositories: + +| Pilot | url | +|---------|---------| +| CHILD | https://github.com/polifonia-project/documentary-evidence-benchmark/tree/main/corpus | +| MEETUPS | https://github.com/polifonia-project/meetups_pilot/tree/main/cleanText | + +However, it is possible to reconstruct the corpus using the metadata provided in the previous section. +Furthermore, the data processed and annotated can be accessed interrogating the corpus (how to interrogate the corpus is explained in a README.md file inside the interrogation folder of this repository). + + +#### Statistics + +Some statistics of the module are provided below: + +| pilot | #documents | #sentences | #types | #tokens | +|---------|------------|------------|-----------|------------| +| BELLS | 59 | 18.481 | 128.061 | 434.439 | +| CHILD | 30 | 157.815 | 361.550 | 3.442.840 | +| MEETUPS | 19.476 | 822.861 | 1.631.371 | 21.536.135 | +| MUSICBO | 46 | 51.781 | 289.247 | 1.412.456 | +| ORGANS | 1.660 | 25.647 | 45.298 | 368.439 | + + diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/Polifonia-Corpus/annotations/README.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/Polifonia-Corpus/annotations/README.md new file mode 100755 index 00000000..fef7370e --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/Polifonia-Corpus/annotations/README.md @@ -0,0 +1,159 @@ + +# Annotations of the Polifonia Textual Corpus + +This repository contains the annotations of the Polifonia Textual Corpus. + +The corpus is dived into four modules: +- the Wikipedia module +- the Books module +- the Periodicals module +- the Polifonia Pilots module + +Each module (except the Pilot module) contains documents in six languages: Dutch (NL), English (EN), French (FR), German (DE),Italian (IT) and Spanish (ES). 
+ +## Annotation Pipeline + +The annotations of the corpus were produced using cutting-edge Natural Language Processing technologies. +We annotated each text of the corpus with a NLP pipeline composed of: + +1. Sentence splitting +2. Tokenization +3. Lemmatization +4. Part-of-speech tagging +5. Word Sense Disambiguation +6. Named Entity Recognition +7. Entity Linking + +Steps 1-4 and 6 have been conducted using the **[SpaCy](http://spacy.io)** NLP library. +For each language of the corpus we used a dedicated SpaCy Model: + +| lang | model name | +|------|-----------------| +| DE | de_core_news_lg | +| EN | en_core_web_trf | +| ES | es_core_news_lg | +| FR | fr_core_news_lg | +| IT | it_core_news_lg | +| NL | nl_core_news_lg | + +Steps 5 and 7 require more sophisticated technologies. +For this reason, we used **[EWISER](https://github.com/SapienzaNLP/ewiser)** for step 5 (Word Sense Disambiguation) for English. +The other languages of the corpus have been annotated using a new system developed within the project. +It exploits recent advances in lexical semantics and in particular on the representation of word senses (**[ARES](http://sensembert.org/)**) and a powerful WSD model (**[WSD-games](https://github.com/roccotrip/wsd_games_emb)**). +The advantages of this new model are based on the fact that it is accurate, fast and can work efficiently on different languages. +For step 7 we used **[ExTenD](https://github.com/SapienzaNLP/extend)** for English. +For the other languages of the corpus we adapted **[WSD-games](https://github.com/roccotrip/wsd_games_emb)** to work on the entity linking task. +Also in this case the model is accurate, fast and can work efficiently on different languages. + + +## Annotation Example +The annotations of the Polifonia Textual Corpus are provided in **[CoNLL-U format](https://universaldependencies.org/format.html)**. +Given an input sentence (from the English Wikipedia module) such as: + +> James H. Mathis Jr. 
(born August 1967), known as Jimbo Mathus, is an American singer-songwriter and guitarist, best known for his work with the swing revival band Squirrel Nut Zippers. + +the resulting annotation will start with metadata information: + + +> #polifonia_doc_id = 32607842_bn___02615097n.html + +That provides a unique identifier for the document and in this case is composed of two identifiers: the first one is the BabelNet id of the corresponding Wikipedia page (32607842_bn), the second part is the Wikipedia identifier of the page (02615097n). + +> #polifonia_sent_id = sent_0 + +Then there is a progressive number for each sentence of the document. + +> #sent = James H. Mathis Jr. (born August 1967), known as Jimbo Mathus, is an American singer-songwriter and guitarist, best known for his work with the swing revival band Squirrel Nut Zippers. + +And then there is the text of the sentence. + +After the metadata there is the sentence annotation: + +| token_id | word form | lemma | POS | WordNet sense | NER class | NER BIO tag | Entity Linking ------- | is a musical concept? | +|-----------|----------------------------|-------|-----|---------------|-----------|-----------|---------------------|-----------------------| +| token_0 | James | James | PROPN | | PERSON | B | James H. Mathis Jr. | 0 | +| token_1 | H. | H. | PROPN | | PERSON | I | 0 | 0 | +| token_2 | Mathis | Mathis | PROPN | | PERSON | I | 0 | 0 | +| token_3 | Jr. 
| PROPN | | PERSON | I | 0 | 0 | +| token_4 | ( | ( | PUNCT | | | O | 0 | 0 | +| token_5 | born | bear | VERB | wn:02518161v | | O | 0 | 0 | +| token_6 | August | August | PROPN | | DATE | B | August 1967 | 0 | 0| +| token_7 | 1967 | 1967 | NUM | | DATE | I | 0 | | | | | | | | +| token_8 | ) | ) | PUNCT | | | O | 0 | 0 | +| token_9 | , | , | PUNCT | | | O | 0 | 0 | +| token_10 | known | know | VERB | wn:01426397v | | O | 0 | 0 | +| token_11 | as | as | ADP | | | O | 0 | 0 | +| token_12 | Jimbo | Jimbo | PROPN | | PERSON | B | Jimbo Mathus | 0 | +| token_13 | Mathus | Mathus | PROPN | | PERSON | I | 0 | 0 | +| token_14 | , | , | PUNCT | | | O | 0 | 0 | +| token_15 | is | be | AUX | | | O | 0 | 0 | +| token_16 | an | an | DET | | | O | 0 | 0 | +| token_17 | American | american | ADJ | wn:02927512a | NORP | B | United States | 0 | +| token_18 | singer | singer | NOUN | wn:10599806n | | O | 0 | 1 | +| token_19 | - | - | PUNCT | | | O | 0 | 0| +| token_20 | songwriter | songwriter | NOUN | wn:10624540n | | O | 0 | 1 | +| token_21 | and | and | CCONJ | | | O | 0 | 0 | +| token_22 | guitarist | guitarist | NOUN | wn:10151760n | | O | 0 | 1 | +| token_23 | , | , | PUNCT | | | O | 0 | 0 | +| token_24 | best | well | ADV | wn:00011093r | | O | 0 | 0 | +| token_25 | known | know | VERB | wn:00596644v | | O | 0 | 0 | +| token_26 | for | for | ADP | | | O | 0 | 0 | +| token_27 | his | his | PRON | | | O | 0 | 0 | +| token_28 | work | work | NOUN | wn:05755883n | | O | 0 | 0 | +| token_29 | with | with | ADP | | | O | 0 | 0 | +| token_30 | the | the | DET | | | O | 0 | 0 | +| token_31 | swing | swing | NOUN | wn:07066042n | | O | 0 | 1 | +| token_32 | revival | revival | NOUN | wn:01047338n | | O | 0 | 0 | +| token_33 | band | band | NOUN | wn:08240169n | | O | 0 | 1 | +| token_34 | Squirrel | Squirrel | PROPN | | ORG | B | Squirrel Nut Zippers | 0 | 0 | +| token_35 | Nut | Nut | PROPN | | ORG | I | 0 | 0 | +| token_36 | Zippers | Zippers | PROPN | | ORG | I | 0 | 0 | +| 
token_37 | . | . | PUNCT | | | O | 0 | 0 | + + +## The Wikipedia module +It was created selecting from **[BabelNet domains](http://lcl.uniroma1.it/babeldomains/)** all the **[Wikipedia](https://www.wikipedia.org)** musical pages. + +### Annotations +The annotation of the module can be downloaded from: + +| lang | url | +|------|-------| +| DE | [![DOI](https://zenodo.org/badge/DOI/10.5281/zenodo.6702689.svg)](https://doi.org/10.5281/zenodo.6702689)| +| EN | [![DOI](https://zenodo.org/badge/DOI/10.5281/zenodo.6759156.svg)](https://doi.org/10.5281/zenodo.6759156)| +| ES | [![DOI](https://zenodo.org/badge/DOI/10.5281/zenodo.6759021.svg)](https://doi.org/10.5281/zenodo.6759021)| +| FR | [![DOI](https://zenodo.org/badge/DOI/10.5281/zenodo.6759025.svg)](https://doi.org/10.5281/zenodo.6759025)| +| IT | [![DOI](https://zenodo.org/badge/DOI/10.5281/zenodo.6759017.svg)](https://doi.org/10.5281/zenodo.6759017)| +| NL | [![DOI](https://zenodo.org/badge/DOI/10.5281/zenodo.6757537.svg)](https://doi.org/10.5281/zenodo.6757537)| + +## The Books module +It was created with the help of musicologists that provided the titles of different influencial music books. + +### Annotations + +At this stage, the annotations of the Books module are only available to the Polifonia consortium members in accordance with the heterogeneous licensing of the Books sources. Interested parties may contact us, and we will evaluate the sharing of the annotated data. + +## The Periodicals module +It was created with the help of musicologists that provided the titles of different influencial music periodicals. + +### Annotations + +At this stage, the annotations of the Periodicals module are only available to the Polifonia consortium members in accordance with the heterogeneous licensing of the Periodicals sources. Interested parties may contact us, and we will evaluate the sharing of the annotated data. 
+ +## The Polifonia Pilots module +It was created collecting the textual material selected by five **[Polifonia Pilots](https://polifonia-project.eu/pilots/)**: +- BELLS +- CHILD +- MEETUPS +- MUSICBO +- ORGANS + + +### Annotations + +At this stage, the annotations of the Pilots module are only available to the Polifonia consortium members in accordance with the heterogeneous licensing of the Pilots sources. Interested parties may contact us, and we will evaluate the sharing of the annotated data. Texts collected for Child and Meetups Pilots are royalty-free, therefore we release their annotations that can be downloaded from the table below: + +| Pilot | url | +|---------|-------| +| CHILD | [![DOI](https://zenodo.org/badge/DOI/10.5281/zenodo.6759261.svg)](https://doi.org/10.5281/zenodo.6759261) | +| MEETUPS | [![DOI](https://zenodo.org/badge/DOI/10.5281/zenodo.6759272.svg)](https://doi.org/10.5281/zenodo.6759272) | diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/Polifonia-Corpus/interrogation/README.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/Polifonia-Corpus/interrogation/README.md new file mode 100755 index 00000000..677cfe4f --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/Polifonia-Corpus/interrogation/README.md @@ -0,0 +1,133 @@ +# Interrogation Functionalities of Polifonia Textual Corpus + +The interrogation of the corpus takes advantage of the annotation described in the *[annotations](https://github.com/polifonia-project/Polifonia-Corpus/tree/master/annotations)* section of this repository. + +The main script to use to interrogate the corpus is: + + +> interrogate.py + +It has different parameters that can be used to select, navigate and store sentences of the corpus that satisfy a specific query. +The parameters are explained below; their use is described in the following sections. + +1. 
--annotations_path: indicates the path where the annotations databases are stored (the default value is annotation/db/) +2. --corpus: indicates what module of the corpus has to be queried. It can be Wikipedia, Books, Periodicals or Pilots +3. --lang: indicates the language to use for the interrogations. It can be DE, EN, ES, FR, IT or NL +4. --interrogation_type: indicates the type of interrogation that has to be conducted. It can be keyword, concept or entity. Each interrogation type will be explained in the next sections. +5. --query: the keyword to use for the interrogation, e.g., guitar. +6. --sent_n: indicates the number of sentences to get at each interrogation. +7. --save_to_file: indicates if the results of the interrogations have to be saved to a file. The default value of this parameter is 'No'. + +# How to use this repository + +The first step is to clone the repository +``` +> git clone https://github.com/polifonia-project/Polifonia-Corpus.git +> cd Polifonia-Corpus +``` +The second step involves the download of the required packages +``` +> pip install -r requirements.txt +> cd interrogation +``` + +Once the environment is set up, it is possible to test the script using the default parameters with: + +``` +> python interrogate.py +``` + +The annotations related to a query will be automatically downloaded the first time that the script is used. + +To change the default keyword, the --query parameter has to be passed to the script: +``` +> python interrogate.py --query swing +``` +In this way the word 'swing' will be searched through the corpus and some sentences will be displayed. + + +## Keyword search +The keyword search can be used to select the sentences in the corpus that contain that specific keyword. + +### API + +Setting the parameter "--interrogation_type" to "keyword" tells the system to interrogate the corpus searching the keywords +provided through the "--query" parameter. 
+ +``` +> python interrogate.py --interrogation_type keyword --query swing +``` + +### Example +The following prompt will retrieve from the Wikipedia corpus (--corpus Wikipedia) sentences in English (--lang EN) that contain the keyword "swing". +It will show to the user up to 100 sentences (--sent_n 100) at time and ask the user if he wants to repeat the query to retrieve other sentences. +All the sentences that have been shown will be saved to a file (--save_to_file Yes) in the "out" folder of the repository. + +``` +> python interrogate.py --annotations_path ../annotations/db --corpus Wikipedia --lang EN --interrogation_type keyword --query swing --sent_n 100 --save_to_file Yes +``` + +![keyword search results](figs/keyword_search.png) + +The results of the query are presented sentence by sentence. In each line of the results there is the document id of the sentence and the keyword in context. +If the results are saved the entire sentences are saved and not just the snippets of the keyword context. + +## Concept search + +The concept search is similar to the keyword search but instead of searching the corpus using keywords it uses concepts (the specific sense of a word), exploiting the sense annotation of the corpus. + +### API + +Setting the parameter "--interrogation_type" to "concept" tells the system to interrogate the corpus searching the sentences annotated with the specified WordNet sense. +To select a concept, a lemma is provided through the "--query" parameter and the system will ask the user to select a concept related to the provided lemma. + +``` +> python interrogate.py --interrogation_type concept --query swing +``` + +### Example +The following prompt will retrieve from the Wikipedia corpus (--corpus Wikipedia) sentences in English (--lang EN) that contain a concept that has "swing" as its lemma. +To select the concept the system will provide a list of concepts as shown below. 
+ +![concept selection](figs/concept_selection.png) + +Entering "4", the system will show to the user up to 100 sentences (--sent_n 100) annotated with the corresponding sense at time and ask the user if he wants to repeat the search to retrieve other sentences. +All the sentences that have been shown will be saved to a file (--save_to_file Yes) in the "out" folder of the repository. + +``` +> python interrogate.py --annotations_path ../annotations/db --corpus Wikipedia --lang EN --interrogation_type concept --query swing --sent_n 100 --save_to_file Yes +``` + +![concept search results](figs/concept_search.png) + +The results of the query are presented sentence by sentence. In each line of the results there is the document id of the sentence and the keyword in context. +If the results are saved the entire sentences are saved and not just the snippets of the keyword context. + + +## Entity search +The concept search is similar to the concept search but instead of searching the corpus using word senses it uses named entities (as specified in a knowledge base, Wikipedia in our case), exploiting the sense annotation of the corpus. + + +### API +Setting the parameter "--interrogation_type" to "entity" tells the system to interrogate the corpus searching the sentences annotated with the specified Wikipedia entity. +To select an entity, a word is provided through the "--query" parameter and the system will ask the user to select an entity related to the provided word. + +``` +> python interrogate.py --interrogation_type entity --query wagner +``` + +### Example +The following prompt will retrieve from the Wikipedia corpus (--corpus Wikipedia) sentences in English (--lang EN) that contain a mention to a named entity "bach" as its lemma. +To select the specific named entity the system will provide a list of named entities as shown below. 
+ +![entity selection](figs/entity_selection.png) + +Entering "0", the system will show to the user up to 100 sentences (--sent_n 100) annotated with the corresponding named entity at time and ask the user if he wants to repeat the search to retrieve other sentences. +All the sentences that have been shown will be saved to a file (--save_to_file Yes) in the "out" folder of the repository. + +``` +> python interrogate.py --annotations_path ../annotations/db --corpus Wikipedia --lang EN --interrogation_type entity --query bach --sent_n 100 --save_to_file Yes +``` + +![entity selection](figs/entity_search.png) \ No newline at end of file diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/broadcast-concerts-knowledge-graph/README.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/broadcast-concerts-knowledge-graph/README.md new file mode 100644 index 00000000..58aef54c --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/broadcast-concerts-knowledge-graph/README.md @@ -0,0 +1,8 @@ +# broadcast-concerts-knowledge-graph +The NISV knowledge graph for broadcast concerts. This knowledge graph contains metadata of more than 75,000 concerts broadcast on Dutch public service TV and radio. The source of the knowledge graph is the archival metadata stored at the Netherlands Institute for Sound & Vision. + +The concerts (and the rest of the NISV archive) are available via the [Sound&Vision SPARQL endpoint](https://cat.apis.beeldengeluid.nl/sparql). 
The exact concert collection can be accessed via the query in [this link](https://cat.apis.beeldengeluid.nl/#transientDatasources=https%3A%2F%2Fcat.apis.beeldengeluid.nl%2Fsparql&query=PREFIX%20sdo%3A%20%3Chttps%3A%2F%2Fschema.org%2F%3E%0A%0A%23%20Show%20the%20ID%20and%20title%20of%20all%20concerts%20that%20are%20part%20of%20the%20Dutch%20Broadcast%20Concert%20%0A%23%20(MOZ)%20collection%2C%20in%20alphabetical%20order%0A%0ASELECT%20DISTINCT%20%3FprogramUri%20%3FprogramName%0AWHERE%0A%7B%0A%20%23%20Filter%20for%20programmes%20belonging%20to%20the%20series%20%22Muziekopnamen%20Zendgemachtigden%20(MOZ)%22%2C%20using%20its%20ID%0A%20%3FprogramUri%20sdo%3ApartOfSeason%2Fsdo%3ApartOfSeries%20%3Chttp%3A%2F%2Fdata.beeldengeluid.nl%2Fid%2Fseries%2F2101608030025711131%3E%20%3B%0A%20%20%20%20%20%20%20%20%20%20%20%20%20sdo%3Aname%20%3FprogramName%20.%20%0A%7D%20ORDER%20BY%20%3FprogramName) + +Each concert has its own license, determine using the NISV rules for licensing. Where possible, this is a CC license. The license is stated in the triples for that concert. 
+ +This knowledge graph is further documented in this [data story](https://projects.dharc.unibo.it/melody/sound_and_vision/dutch_broadcast_concert_collection) and this [blog](https://labs.beeldengeluid.nl/blogs/moz-dataset-blog) diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/broadcast-concerts-knowledge-graph/ecosystem/broadcast-concerts-docs-blog.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/broadcast-concerts-knowledge-graph/ecosystem/broadcast-concerts-docs-blog.md new file mode 100644 index 00000000..357641ca --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/broadcast-concerts-knowledge-graph/ecosystem/broadcast-concerts-docs-blog.md @@ -0,0 +1,21 @@ +--- +component-id: broadcast-concerts-docs-blog +type: Documentation +name: Broadcast Concerts blog +description: The Broadcast Concerts blog describes the key features of the Broadcast Concerts knowledge graph +work-package: +- WP2 +pilot: +- Interlink +project: polifonia-project +resource: https://labs.beeldengeluid.nl/blogs/moz-dataset-blog +contributors: +- Willem Melder +- Mari Wigham +- Govert Brinkmann +related-components: +- reuses: + - broadcast-concerts-sparql-endpoint + - broadcast-concerts-knowledge-graph + +--- diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/broadcast-concerts-knowledge-graph/ecosystem/broadcast-concerts-docs-melody.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/broadcast-concerts-knowledge-graph/ecosystem/broadcast-concerts-docs-melody.md new file mode 100644 index 00000000..740ad5af --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/broadcast-concerts-knowledge-graph/ecosystem/broadcast-concerts-docs-melody.md @@ -0,0 +1,20 @@ +--- +component-id: broadcast-concerts-docs-melody +type: Documentation +name: Broadcast Concerts Data Story +description: The Broadcast Concerts Data Story describes the key features of the Broadcast Concerts knowledge graph 
+work-package: +- WP2 +pilot: +- Interlink +project: polifonia-project +resource: https://projects.dharc.unibo.it/melody/sound_and_vision/dutch_broadcast_concert_collection +contributors: +- Willem Melder +- Mari Wigham +- Govert Brinkmann +related-components: +- reuses: + - broadcast-concerts-sparql-endpoint + - broadcast-concerts-knowledge-graph +--- diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/broadcast-concerts-knowledge-graph/ecosystem/broadcast-concerts-sparql-endpoint.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/broadcast-concerts-knowledge-graph/ecosystem/broadcast-concerts-sparql-endpoint.md new file mode 100644 index 00000000..efea22b0 --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/broadcast-concerts-knowledge-graph/ecosystem/broadcast-concerts-sparql-endpoint.md @@ -0,0 +1,23 @@ +--- +component-id: broadcast-concerts-sparql-endpoint +type: SPARQLEndpoint +name: NISV SPARQL Endpoint +description: The SPARQL endpoint that publishes NISV linked data, including that of the broadcast concerts +resource: https://cat.apis.beeldengeluid.nl/ +work-package: +- WP2 +pilot: +- Interlink +project: polifonia-project +copyright: Each concert has its own license, determined using the NISV rules for licensing. Where possible, this is a CC license. The license is stated in the triples for that concert. 
+contributors: +- Willem Melder +- Mari Wigham +- Govert Brinkmann +related-components: +- reuses: + - https://schema.org +- serves: + - broadcast-concerts-knowledge-graph +--- + diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/broadcast-concerts-knowledge-graph/ecosystem/broadcast-concerts.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/broadcast-concerts-knowledge-graph/ecosystem/broadcast-concerts.md new file mode 100644 index 00000000..0e062ee1 --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/broadcast-concerts-knowledge-graph/ecosystem/broadcast-concerts.md @@ -0,0 +1,62 @@ +--- +component-id: broadcast-concerts-knowledge-graph +type: KnowledgeGraph +name: Broadcast Concerts Knowledge Graph +description: The NISV knowledge graph for broadcast concerts. This knowledge graph contains metadata of more than 75,000 concerts broadcast on Dutch public service TV and radio. The source of the knowledge graph is the archival metadata stored at the Netherlands Institute for Sound & Vision. 
+resource: + - https://cat.apis.beeldengeluid.nl/#transientDatasources=https%3A%2F%2Fcat.apis.beeldengeluid.nl%2Fsparql&query=PREFIX%20sdo%3A%20%3Chttps%3A%2F%2Fschema.org%2F%3E%0A%0A%23%20Show%20the%20ID%20and%20title%20of%20all%20concerts%20that%20are%20part%20of%20the%20Dutch%20Broadcast%20Concert%20%0A%23%20(MOZ)%20collection%2C%20in%20alphabetical%20order%0A%0ASELECT%20DISTINCT%20%3FprogramUri%20%3FprogramName%0AWHERE%0A%7B%0A%20%23%20Filter%20for%20programmes%20belonging%20to%20the%20series%20%22Muziekopnamen%20Zendgemachtigden%20(MOZ)%22%2C%20using%20its%20ID%0A%20%3FprogramUri%20sdo%3ApartOfSeason%2Fsdo%3ApartOfSeries%20%3Chttp%3A%2F%2Fdata.beeldengeluid.nl%2Fid%2Fseries%2F2101608030025711131%3E%20%3B%0A%20%20%20%20%20%20%20%20%20%20%20%20%20sdo%3Aname%20%3FprogramName%20.%20%0A%7D%20ORDER%20BY%20%3FprogramName +work-package: +- WP2 +pilot: +- Interlink +project: polifonia-project +doi: 10.5281/zenodo.7377532 +copyright: Each concert has its own license, determine using the NISV rules for licensing. Where possible, this is a CC license. The license is stated in the triples for that concert. +contributors: +- Willem Melder +- Mari Wigham +- Govert Brinkmann +related-components: +- documentation: + - broadcast-concerts-docs-melody + - broadcast-concerts-docs-blog +- reuses: + - https://schema.org + - broadcast-concerts-sparql-endpoint +- story: + - "William#1_EuropeanFolkMusic" +- persona: + - William + +--- +# Broadcast Concerts Knowledge Graph + +The NISV knowledge graph for broadcast concerts. This knowledge graph contains metadata of more than 75,000 concerts broadcast on Dutch public service TV and radio. + +## Knowledge Graph description +The source of the knowledge graph is the archival metadata stored at the Netherlands Institute for Sound & Vision. The concerts (and the rest of the NISV archive) are available via the [Sound & Vision SPARQL endpoint](https://cat.apis.beeldengeluid.nl/sparql). 
The exact concert collection can be accessed via the query in [this link](https://cat.apis.beeldengeluid.nl/#transientDatasources=https%3A%2F%2Fcat.apis.beeldengeluid.nl%2Fsparql&query=PREFIX%20sdo%3A%20%3Chttps%3A%2F%2Fschema.org%2F%3E%0A%0A%23%20Show%20the%20ID%20and%20title%20of%20all%20concerts%20that%20are%20part%20of%20the%20Dutch%20Broadcast%20Concert%20%0A%23%20(MOZ)%20collection%2C%20in%20alphabetical%20order%0A%0ASELECT%20DISTINCT%20%3FprogramUri%20%3FprogramName%0AWHERE%0A%7B%0A%20%23%20Filter%20for%20programmes%20belonging%20to%20the%20series%20%22Muziekopnamen%20Zendgemachtigden%20(MOZ)%22%2C%20using%20its%20ID%0A%20%3FprogramUri%20sdo%3ApartOfSeason%2Fsdo%3ApartOfSeries%20%3Chttp%3A%2F%2Fdata.beeldengeluid.nl%2Fid%2Fseries%2F2101608030025711131%3E%20%3B%0A%20%20%20%20%20%20%20%20%20%20%20%20%20sdo%3Aname%20%3FprogramName%20.%20%0A%7D%20ORDER%20BY%20%3FprogramName) + +Each concert has its own license, determine using the NISV rules for licensing. Where possible, this is a CC license. The license is stated in the triples for that concert. + +## Competency questions related to the Broadcast Concerts Knowledge Graph +William: + +* In which collections/datasets does song X occur? + +* Which composers and performers are related to these compositions?' + +* What are the relations between the relevant countries, compositions, composers and performers in the various collections? + +* How many search results are there per time period? + +* How many search results are there per genre? 
+ +## MELODY Data stories +[MELODY data story about the broadcast concerts collection](https://projects.dharc.unibo.it/melody/sound_and_vision/dutch_broadcast_concert_collection) +## Blog + +This knowledge graph is further documented in [this blog](https://labs.beeldengeluid.nl/blogs/moz-dataset-blog) + +## Additional Information +### Queries +See the example queries listed in the client at the [Sound & Vision SPARQL endpoint](https://cat.apis.beeldengeluid.nl/sparql) diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/clef/LICENSE.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/clef/LICENSE.md new file mode 100644 index 00000000..afad3ea7 --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/clef/LICENSE.md @@ -0,0 +1,13 @@ +ISC License (ISC) +================================== +_Copyright 2021 marilenadaquino_ + +Permission to use, copy, modify, and/or distribute this software for any purpose with or +without fee is hereby granted, provided that the above copyright notice and this permission +notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS +SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL +THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/clef/README.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/clef/README.md new file mode 100644 index 00000000..55747100 --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/clef/README.md @@ -0,0 +1,65 @@ +--- +component-id: clef-software +type: WebApplication +name: CLEF +description: CLEF, Crowdsourcing Linked Entities via web Form, is a lightweight Linked Open Data native cataloguing system tailored to small-medium crowdsourcing projects. +work-package: +- WP1 +project: polifonia-project +resource: https://github.com/polifonia-project/clef/releases +demo: https://projects.dharc.unibo.it/musow/ +release-date: 2023/05/13 +release-number: v1.0.2 +release-link: https://github.com/polifonia-project/clef/releases/latest +doi: 10.5281/zenodo.7906320 +changelog: https://github.com/polifonia-project/clef/releases/latest +license: +- IscLicense +contributors: +- Marilena Daquino +- Martin Hlosta +related-components: +- persona: Laurent +- story: Laurent#1_MusicArchives +bibliography: +- main-publication: "Daquino, M., Wigham, M., Daga, E., Giagnolini, L., & Tomasi, F. (2023). Clef. a linked open data native system for crowdsourcing. JOCCH. DOI: https://dl.acm.org/doi/10.1145/3594721" +--- + +# CLEF, Crowdsourcing Linked Entities via web Form + +[![DOI](https://zenodo.org/badge/479251315.svg)](https://zenodo.org/badge/latestdoi/479251315) +[![License: ISC](https://img.shields.io/badge/License-ISC-blue.svg)](https://opensource.org/licenses/ISC) + +CLEF is a lightweight Linked Open Data (LOD) native cataloguing system tailored to small-medium collaborative projects. It offers a web-ready solution for setting up data collection or crowdsourcing campaigns. It is designed to facilitate admin tasks, and to allow collaborators to produce high quality linked open data via user interface, without the burden of understanding what all this fuzz around LOD is about! 
+ +Some highlights: + + * customisable: create your templates for data collection + * collaborative: allow everybody to contribute to your catalogue and grant permissions to smaller groups of collaborators + * peer-review: allow collaborators to review records before publication + * autocompletion: ensure collaborators reference the same contents. Autocomplete suggestions from Wikidata, geonames and your own catalogue make it easier! + * synchronization with github: use github to authenticate collaborators, to backup and version your data + * web archiving: save your web sources for future use in Internet archive + * release 5-star data store, serve, and query your data as Linked Open Data + * browse data immediately CLEF comes with an online browsable catalogue. You can create indexes with one click (no need to develop another application for showcasing your data!) + * work locally or remote use CLEF from your desktop or on a remote server + +Full documentation: https://polifonia-project.github.io/clef/. + +See a working demo: https://projects.dharc.unibo.it/musow/. + + +CLEF is part of [Polifonia](https://polifonia-project.eu/) H2020 project (Deliverable 1.9). This repository moved from: https://github.com/polifonia-project/registry_app. + +Cite this repository as: + +``` +Daquino Marilena et al. (2022). CLEF (v1.0.1). DOI: 10.5281/zenodo.6559445 +``` + +or cite our journal paper: + +``` +Marilena Daquino, Mari Wigham, Enrico Daga, Lucia Giagnolini, and Francesca Tomasi (2022). CLEF. A Linked Open Data native system for Crowdsourcing. JOCCH (soon to be published). arXiv: https://arxiv.org/abs/2206.08259. 
+``` + diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/cometa-ontology/README.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/cometa-ontology/README.md new file mode 100644 index 00000000..aa3db5b9 --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/cometa-ontology/README.md @@ -0,0 +1,107 @@ +# Co*Meta +An extension of [Music Meta](https://github.com/polifonia-project/musicmeta-ontology) to describe the metadata of music **co**llections, **co**rpora, **co**ntainers, or simply music datasets! + +[![DOI](https://zenodo.org/badge/372536364.svg)](https://zenodo.org/badge/latestdoi/372536364) +[![License: CC BY 4.0](https://img.shields.io/badge/License-CC_BY_4.0-lightgrey.svg)](https://creativecommons.org/licenses/by/4.0/) + +> 🔗 Ontology URI: [https://w3id.org/polifonia/ontology/cometa/](https://w3id.org/polifonia/ontology/cometa/) + +Here, metadata is described at the collection-level (data curator, annotations provided, availability of audio music, etc.), and at the content-level, (e.g., the title, artist, release of each piece in a dataset). The design of CoMeta is informed by a survey of Music Information Retrieval datasets, which allowed for the categorisation of common fields. + + +![overview](diagrams/cometa_overview.png) + + +The ontology designed to describe music datasets as containers of music-related data with specific characteristics and annotations. + +- **Collection Information**: the ontology captures information about the dataset as a whole, including the number of records (compositions or performances), genres, year of release, collection metadata (project investigator, university, etc.), and content metadata (specification document with track-level information like title, artist, release, MusicBrainz identifier). 
This also includes properties such as music media type (audio or symbolic), duration, audio format (MP3, WAV, FLAC), symbolic format (MIDI, MusicXML, MEI), and other additional media (audio features, rankings, etc.). + +- **Annotations**: to represent the annotations provided within the dataset, which are crucial for MIR tasks. It would include various types of annotations contributed by domain experts (musicologists, composition teachers) or listeners, covering aspects like music structure, key, chord progressions, emotions, listening habits, etc. + +- **Computational tasks**: to define the different tasks that a dataset enables based on the available annotations. Examples in MIR include music emotion recognition, pattern extraction, cadence detection, etc. Together with the [`Music Algorithm`](https://github.com/polifonia-project/music-algorithm-ontology) ontology (its sibling ontology) it also allows to track the performance/accuracy of computational methods tested on each dataset. + +- **Access and availability**: to capture information regarding the accessibility of the dataset, including whether it is open, on-demand, or closed, and whether it can be accessed online or requires manual provisioning. It may also include details about an API if available. + +- **License/Copyright**: to represent the licensing and copyright information associated with the dataset, ensuring compliance and proper attribution. + +- **References**: to provide links to official websites and academic manuscripts describing the dataset and its collection process, facilitating proper citation and reference. + + +By incorporating and supporting these requirements, the ontology would provide a structured representation of music datasets, their metadata, annotations, and interconnections. 
It would enable researchers and practitioners to explore, analyse, and utilize the datasets more effectively, promote interoperability, and facilitate the automatic discovery and extraction of knowledge from music-related data. + +![overview](diagrams/cometa_definition.png) + +![overview](diagrams/cometa_content.png) + + +--- + + +## License + +This work is licensed under a +[Creative Commons Attribution 4.0 International License][cc-by]. + +[cc-by]: http://creativecommons.org/licenses/by/4.0/ diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/cometa-ontology/header.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/cometa-ontology/header.md new file mode 100644 index 00000000..f3cca605 --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/cometa-ontology/header.md @@ -0,0 +1,46 @@ +--- +component-id: https://w3id.org/polifonia/ontology/cometa/ +type: Ontology +name: CoMeta Ontology +description: An extension of Music Meta to describe music datasets +image: diagrams/cometa_overview.png +work-package: +- WP2 +pilot: +- INTERLINK +project: polifonia-project +resource: ontology/cometa.owl +release-date: 13/05/2023 +release-number: v1.0 +release-link: https://github.com/polifonia-project/ontology-network/releases +doi: 10.5281/zenodo.7919970 +changelog: https://github.com/polifonia-project/ontology-network/releases +licence: +- CC-BY_v4 +copyright: "Copyright (c) 2023 CoMeta Ontology Contributors" +contributors: +- Jacopo de Berardinis +- Andrea Poltronieri +- Nicolas Lazzari +related-components: +- informed-by: + - polifoniacq-dataset +- reuses: # any reused/imported ontology + - https://w3id.org/polifonia/ontology/core/ + - https://w3id.org/polifonia/ontology/music-meta/ +- extends: # any extended ontology + - https://w3id.org/polifonia/ontology/music-meta/ +- story: # any related story this ontology addresses + - Linka#1_MusicKnowledge +- persona: # any persona this ontology addresses + - Linka +--- + + +# 
CoMeta Ontology + +An extension of Music Meta to describe the metadata of music collections, corpora, containers, or simply music datasets! Here, metadata is described at the collection-level (data curator, annotations provided, availability of audio music, etc.), and at the content-level, (e.g., the title, artist, release of each piece in a dataset). The design of CoMeta is informed by a survey of Music Information Retrieval datasets, which allowed for the categorisation of common fields. + +[Link to the website](https://github.com/polifonia-project/cometa-ontology) + diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/dashboard/LICENSE.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/dashboard/LICENSE.md new file mode 100644 index 00000000..40afa3e7 --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/dashboard/LICENSE.md @@ -0,0 +1,13 @@ +ISC License (ISC) +================================== +_Copyright 2021 giuliarenda_ + +Permission to use, copy, modify, and/or distribute this software for any purpose with or +without fee is hereby granted, provided that the above copyright notice and this permission +notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS +SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL +THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
\ No newline at end of file diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/dashboard/README.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/dashboard/README.md new file mode 100644 index 00000000..0b377234 --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/dashboard/README.md @@ -0,0 +1,93 @@ +--- +component-id: melody-software +type: WebApplication +name: MELODY +description: MELODY is a dashboarding system for designing and publishing data stories based on Linked Open Data. +work-package: +- WP1 +project: polifonia-project +resource: https://github.com/polifonia-project/dashboard/releases +demo: https://projects.dharc.unibo.it/melody/ +release-date: 2022/05/12 +release-number: v0.1.1 +release-link: https://github.com/polifonia-project/dashboard/releases/latest +doi: 10.5281/zenodo.6637345 +changelog: https://github.com/polifonia-project/dashboard/releases/latest +license: +- IscLicense +contributors: +- Marilena Daquino +- Giulia Renda +--- + + +# Polifonia Dashboard + +[![DOI](https://zenodo.org/badge/431529042.svg)](https://zenodo.org/badge/latestdoi/431529042) + +MELODY - Make mE a Linked Open Data StorY is a dashboarding system that allows users familiar with Linked Open Data to create web-ready data stories. + + * Authenticate with GitHub to create a new story. + * Access data from any SPARQL endpoint. + * Select the layout template of your story. + * Include charts, sections, filters, and descriptions. + * Preview the final data story while creating it. + * Embed or export your data story and single charts in several formats. + +See the full documentation at https://polifonia-project.github.io/dashboard/. + +## Quickstart + +> **Step #1 - Get the source code** + +- Download the ZIP +- Use GIT tool in the terminal/powershel/bash to clone the source code + +> **Step #2 - Set up the environment** + +1. Python3 should be installed properly in the workstation. 
If you are not sure if Python is +properly installed, please open a terminal and type python --version. +2. Enter the project folder using the terminal/powershell/bash. +3. Install modules using a [Virtual Environment](https://docs.python.org/3/library/venv.html) + +```bash + +#MacOS/Linux +$ cd myproject +$ python3 -m venv venv +$ . venv/bin/activate + +#Windows +> cd myproject +> py -3 -m venv venv +> venv\Scripts\activate +``` + + +> **Step #3 - Install requirements** + +`pip install -r requirements.txt` + +> **Step #4 - Run the application** +```bash +#bash +$ export FLASK_APP=app +$ flask run +* Running on http://127.0.0.1:5000/ + +#CMD +> set FLASK_APP=app +> flask run +* Running on http://127.0.0.1:5000/ + +#Powershell +> $env:FLASK_APP = "app" +> flask run +* Running on http://127.0.0.1:5000/ +``` + +MELODY is part of [Polifonia](https://polifonia-project.eu) H2020 project (described in Deliverable 1.9). Cite this repository as follows: + +``` +Renda Giulia, and Marilena Daquino. (2022). MELODY: Beta release (v0.1.1). 
DOI: 10.5281/zenodo.6637346 +``` diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/deep-listening/README.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/deep-listening/README.md new file mode 100644 index 00000000..fd9451ec --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/deep-listening/README.md @@ -0,0 +1,29 @@ +--- +component-id: deep-listening +name: Deep Listening +description: Software, methods and user studies exploring the cross-modal interpretation of music and visual art +type: User interface +work-package: +- WP5 +project: polifonia-project +resource: https://github.com/spice-h2020/imma-slow-looking +demo: https://spice.kmi.open.ac.uk/demos/imma-slow-looking +licence: +- CC-BY_v4 +contributors: +- Paul Mulholland +- Enrico Daga +- Jason Carvalho +bibliography: +- publication: "Paul Mulholland, Adam Stoneman, Naomi Barker, Mark Maguire, Jason Carvalho, Enrico Daga, and Paul Warren. 2023. The Sound of Paintings: Using Citizen Curation to Explore the Cross-Modal Personalization of Mu- seum Experiences. In UMAP ’23 Adjunct: Adjunct Proceedings of the 31st ACM Conference on User Modeling, Adaptation and Personalization (UMAP ’23 Adjunct), June 26–29, 2023, Limassol, Cyprus. ACM, New York, NY, USA, 11 pages. https://doi.org/10.1145/3563359.3596662" +--- + +# Deep Listening + +Deep Listening is being carried out as part of the Polifonia project to investigate how the cross-modal interpretation of music and visual art can enhance what you hear and what you see. + +The work further extends the Deep Viewpoints software that was developed as part of the EU H2020 SPICE project to support the process of Slow Looking at visual art. Within Deep Viewpoints, the processes of observing and responding to art are guided by scripts. Each script is made up of a sequence of stages containing artworks, statements and various prompts or questions to which the reader of the script can respond. 
During the EU H2020 funded SPICE project, the Irish Museum of Modern Art (IMMA) used Deep Viewpoints as part of an initiative to reach communities traditionally underserved by the museum sector and to bring new perspectives to the museum’s collection and exhibitions. Participating communities were not only involved in interpreting artworks with the guidance of the scripts but also creating new scripts, mediating how others observe and think about art. + +Recent work in collaboration between the Polifonia and SPICE projects has investigated how Deep Viewpoints could be extended to support the cross-modal interpretation of music and visual art. First, support was added for embedding YouTube videos within the scripts to support listening concurrent with viewing artworks, reading associated text, and answering pro- vided prompts within the page of the app. Second, functionality to support multiple choice as well as free text responses to questions was added to support the rating of music on a scale or selecting an emotion that matched the music. Third, a responsive web de- sign (RWD) approach was taken to supporting both the following and authoring of scripts. This was done to potentially support the following and authoring of scripts on personal smartphones (potentially with headphones) while in the museum as well as on larger screen devices. + +The revised software has been used in two ways: (i) a musicologist curating experiences that link music to visual art in a museum collection, and (ii) visitors to a museum exhibition experiencing and creating cross-modal experiences. 
diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/documentary-evidence-benchmark/README.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/documentary-evidence-benchmark/README.md new file mode 100644 index 00000000..d023c009 --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/documentary-evidence-benchmark/README.md @@ -0,0 +1,97 @@ +--- +component-id: documentary-evidence-benchmark +name: Documentary evidence benchmark +brief-description: the benchmark for the extraction of documentary evidence +type: Dataset +release-date: 25/04/2022 +release-number: v1.0 +work-package: +- WP1 +- WP4 +pilot: +- CHILD +licence: +- Apache-2.0 +related-components: +- led +- sparql-anything-cli +credits: +- Enrico Daga +--- +# Documentary evidence benchmark + +This project provides the benchmark for the extraction of documentary evidence, taking the Listening Experience Database (LED) as a reference. + +Below is information on the tasks, data, and the process that generated them from the LED database, also included ([led-SNAPSHOT.nt.tar.gz](led-SNAPSHOT.nt.tar.gz)). 
+ +## Files + +### Sources + +- [CHILD exemplar sources.xlsx](CHILD exemplar sources.xlsx) A curated list of sources from LED that also include experiences relevant to childhood +- [corpus/](corpus/) A folder containing `.txt` files of the selected book sources +- [led-SNAPSHOT.nt.tar.gz](led-SNAPSHOT.nt.tar.gz) an archive of the Listening Experience Database in RDF/N-triples + +### Benchmark data +These are the data that can be used for benchmarking knowledge extraction processes + +- `sources.csv` (columns: `source,file,title,author,author_name,time`) +- `experiences.csv` (columns: `file,exp,excerpt,text,time,place,listening_to,environment,listener,listener_label,type,instrument,genre`) +- `child.csv` includes the list of listening experiences that were marked by domain experts to be relevant to childhood + +## Tasks +We briefly describe each task and refer to the relevant data. + +### Task 1: retrieve documentary evidence relevant to musical experiences + +This task refers to automatically identifying text fragments that contain an account of listening experience, from a selection of texts (in `corpus/`). + +Input: `sources.csv` (`source,file,title,author,author_name,time` both the textual content in `corpus/` and the related metadata can be used) + +Target: for each `file`, find paragraphs that match (or overlap) with the ones in `experience (text)` (the other columns except `file` should be ignored and not used by the approach) + +### Task 2: retrieve documentary evidence relevant to childhood + +This task is equivalent to Task 1, except that the output should match the list of experiences in `child.csv` + +### Task 3: populate documentary evidence entities metadata + +This task operates on the expected output from the previous ones. Given a list of texts and related excerpts, populate the metadata describing the listening experience. 
+ +Input: `sources.csv` (all columns), `experiences.csv` (`file,exp,excerpt,text`) + +Target: automatically derive columns in `experiences.csv`: `place,listening_to,environment,listener,listener_label,type,instrument,genre` + +### Task 4: time-indexing of documentary evidence + +This task operates on the expected output from the previous ones. Given a list of texts and related excerpts, populate the metadata describing the listening experience. + +Input: `sources.csv` (all columns), `experiences.csv` (`file,exp,excerpt,text`) + +Target: automatically derive columns in `experiences.csv`: `time` + + +## Benchmark construction process +The data was generated using [SPARQL Anything](http://sparql-anything.cc), the following `fx` command shall be interpreted as `java -jar sparql-anything-0.7.0-SNAPSHOT.jar`. + +Generate list of sources from exemplary LED entities (uncompress the `led-SNAPSHOT.nt.tar.gz` archive before executing the following). + +``` +fx -q queries/sources.sparql -l led-SNAPSHOT.nt -o data/sources.csv -f CSV +fx -q queries/experiences.sparql -l led-SNAPSHOT.nt -o data/experiences.csv -f CSV +fx -q queries/child.sparql -l led-SNAPSHOT.nt -o data/child.csv -f CSV +``` + + +### Statistics + +|title|count| +|-----|-----| +|sources|25| +|places|277| +|genres|100| +|listeners|194| +|instruments|64| +|experiences relevant to childhood|40| +|experiences|1248| +|performances/pieces|1121| diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/documentary-evidence-benchmark/childKGC.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/documentary-evidence-benchmark/childKGC.md new file mode 100644 index 00000000..8b137891 --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/documentary-evidence-benchmark/childKGC.md @@ -0,0 +1 @@ + diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/external-components/README.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/external-components/README.md 
new file mode 100644 index 00000000..dd4494fb --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/external-components/README.md @@ -0,0 +1,14 @@ +# External components +A repository to declare components of the Polifonia Ecosystem that are developed outside of the organisation. + +## How to add a component? + +1) Open an Issue ([example](https://github.com/polifonia-project/external-components/issues/2)) +2) Create a new file in `components/component-name.md` +3) Edit the file with component metadata (see [the rulebook for details](https://github.com/polifonia-project/rulebook)) +4) When completed, remember to close the Issue! + +## Questions, problems, suggestions? + +Open an Issue. + diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/external-components/components/endpoints/data-open.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/external-components/components/endpoints/data-open.md new file mode 100644 index 00000000..10a9fdd1 --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/external-components/components/endpoints/data-open.md @@ -0,0 +1,49 @@ +--- +component-id: data-open-ac-uk +type: SPARQLEndpoint +name: "The Open Knowledge Graph (OKG)" +description: "The Open Knowledge Graph (OKG) SPARQL Endpoint" +logo: https://avatars.githubusercontent.com/u/79987779?s=200&v=4 +work-package: +- WP1 +- WP2 +- WP4 +pilot: +- MEETUPS +- CHILD +project: polifonia-project +resource: https://data.open.ac.uk/sparql +demo: https://data.open.ac.uk/sparql +licence: +- CC-BY_v4 +copyright: "Copyright (c) 2023 The Open University" +contributors: +- Enrico Daga +- Mathieu d'Aquin +bibliography: +- main-publication: "Daga, Enrico, Mathieu d’Aquin, Alessandro Adamou, and Stuart Brown. \"The open university linked data–data. open. ac. uk.\" Semantic Web 7, no. 2 (2016): 183-191." 
+related-components: +- serves: + - led + - meetups-knowledge-graph +- reuses: + - sparql-anything-cli + - meetups-ontology +--- + +# The Open Knowledge Graph SPARQL Endpoint + +[The Open Knowledge Graph (OKG)](http://data.open.ac.uk) is the home of Linked Data from The Open University. + +We interlink and expose data available from open institutional repositories of [The Open University](http://www.open.ac.uk) and make it available for reuse. + +The data can be searched via a keyword-based interface, using well-known identifiers, or queried using the SPARQL endpoint. + +The datasets relate the publications, qualifications, courses and Audio/Video material produced by The Open University, as well as the people involved in making them. + +All these data are available through standard formats (RDF and SPARQL) and are (in most cases) available under an open license. + +The OKG publishes the following datasets from the Polifonia Ecosystem: + +- [The Listening Experience Database (LED)](https://data.open.ac.uk/page/context/led) +- [The MEETUPS Knowledge Graph (Historical Musical Meetups)](https://data.open.ac.uk/page/context/meetups) diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/external-components/components/led.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/external-components/components/led.md new file mode 100644 index 00000000..0e895f8d --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/external-components/components/led.md @@ -0,0 +1,36 @@ +--- +component-id: led +description: "LED is an open and freely searchable database that brings together a mass of data about people’s experiences of listening to music of all kinds, in any historical period and any culture." +bibliography: + - "Adamou, Alessandro, Simon Brown, Helen Barlow, Carlo Allocca, and Mathieu d’Aquin. 
\"Crowdsourcing Linked Data on listening experiences through reuse and enhancement of library data.\" International Journal on Digital Libraries 20, no. 1 (2019): 61-79. http://oro.open.ac.uk/42045/1/paper_74.pdf" + - "Daga, Enrico, and Enrico Motta. \"Capturing themed evidence, a hybrid approach.\" In Proceedings of the 10th International Conference on Knowledge Capture, pp. 93-100. 2019. http://oro.open.ac.uk/67014/1/TE_Preprint_V1.pdf" +credits: + - "Helen Barlow" + - "Alessandro Adamou" + - "Jason Carvalho" + - "Enrico Daga" +pilot: + - CHILD + - MEETUPS +work-package: + - WP4 +type: KnowledgeGraph +links: + - "https://www.listeningexperience.org" + - "https://data.open.ac.uk/sparql" + - "https://led.kmi.open.ac.uk" +logo: "https://www.listeningexperience.org/led-data/themes/thekeynote-v1-01/images/logo.png" +name: "Listening Experience Database" +demo: https://led.kmi.open.ac.uk +resource: https://data.open.ac.uk/sparql +related-components: + - informed-by: + - Ortenz + - Ortenz#2_MusicalSocialNetwork + - Ortenz#1_MusicAndChildhood +--- + +# The Listening Experience Database +The [Listening Experience Database (LED)](http://led.kmi.open.ac.uk) is an open and freely searchable database that brings together a mass of data about people’s experiences of listening to music of all kinds, in any historical period and any culture. + +There are currently 11882 listening experiences, curated by domain experts, in the database. \ No newline at end of file diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/external-components/components/midi2vec.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/external-components/components/midi2vec.md new file mode 100644 index 00000000..3ac0720b --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/external-components/components/midi2vec.md @@ -0,0 +1,39 @@ +--- +component-id: midi2vec +name: midi2vec +description: Compute graph embeddings from MIDI. 
+type: Library +release-date: 01/2022 +release-number: v0.0.1 +work-package: +- WP2 +pilot: +keywords: + - knowledge graph completion + - graph representation + - link prediction +changelog: +licence: +release-link: https://github.com/midi-ld/midi2vec/releases/tag/v0.0.1 +image: +logo: +demo: +links: + - https://github.com/midi-ld/midi2vec +#running-instance: +credits: +- "Pasquale Lisena" +- https://github.com/pasqLisena +- "Albert Meroño-Peñuela" +- https://github.com/albertmeronyo +#related-components: +bibliography: +- "Pasquale Lisena, Albert Meroño-Peñuela, Raphaël Troncy. MIDI2vec: Learning MIDI Embeddings for Reliable Prediction of Symbolic Music Metadata, to appear in Semantic Web Journal, Special Issue on Deep Learning for Knowledge Graphs, 2021. http://www.semantic-web-journal.net/content/midi2vec-learning-midi-embeddings-reliable-prediction-symbolic-music-metadata-0" + +--- + +# midi2vec + +A software library to compute graph embeddings from MIDI. + +See https://github.com/midi-ld/midi2vec diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/external-components/components/neuma-search-engine.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/external-components/components/neuma-search-engine.md new file mode 100644 index 00000000..d41cdd57 --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/external-components/components/neuma-search-engine.md @@ -0,0 +1,41 @@ +--- +component-id: neuma-search-engine +name: Neuma search engine +description: "A search engine for music content" +type: Application +release-date: 05/22 +release-number: v0.5 +work-package: +- WP1 +- WP3 +pilot: +- FACETS +keywords: + - "information retrieval" + - "music score" +changelog: +licence: +release-link: +image: +logo: +demo: +links: + - http://neuma.huma-num.fr + - https://github.com/polifonia-project/facets-search-engine +resource: http://neuma.huma-num.fr +credits: +- Tiange Zhu (CNAM Paris) +- Philippe Rigaux (CNAM Paris) +- 
Nicolas Travers (ESILV Paris) +- Raphaël Fournier-S'niehotta (CNAM Paris) +related-components: [] +bibliography: +- "Zhu, T.; Fournier-S’niehotta, R.; Rigaux, P.; Travers, N. [A Framework for Content-Based Search in Large Music Collections](https://www.mdpi.com/2504-2289/6/1/23). Big Data Cogn. Comput. 2022, 6, 23. https://doi.org/10.3390/bdcc6010023" +--- + +# Neuma Search Engine + +The Neuma search engine is a content-based music score search engine. It +relies on a adaptated indexing procedure to store musical content as n-grams, +for several aspect of music (melody, rhythm, lyrics). It's fully +compatible with state-of-the-art search engine such as ElasticSearch. diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/external-components/components/ramose.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/external-components/components/ramose.md new file mode 100644 index 00000000..c7e97ee7 --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/external-components/components/ramose.md @@ -0,0 +1,35 @@ +--- +component-id: ramose +name: RAMOSE +brief-description: "A generic tool developed in Python to create REST APIs over SPARQL endpoints" +bibliography: + - "Daquino, Marilena, Heibi, Ivan, Peroni, Silvio, Shotton, David \"Creating RESTful APIs over SPARQL endpoints using RAMOSE.\" Semantic Web Journal (2021): 1-19. 
https://content.iospress.com/articles/semantic-web/sw210439" +credits: + - "Marilena Daquino" + - "Ivan Heibi" + - "Silvio Peroni" + - "David Shotton" +keywords: + - "REST" + - "SPARQL" + - "RDF" +type: Service +links: + - "https://github.com/opencitations/ramose" + - "https://opencitations.net/querying" +licence: ISC +related-components: + - webportal +running-instance: + - https://opencitations.net/ccc/api/v1 + - https://opencitations.net/index/croci/api/v1 + - https://opencitations.net/index/coci/api/v1 + - https://opencitations.net/index/api/v1 + - https://opencitations.net/api/v1 +--- + +# RAMOSE + +Restful API Manager Over SPARQL Endpoints (RAMOSE) is an application that allows agile development and publication of documented RESTful APIs for querying SPARQL endpoints, according to a particular specification document. + +See the [source code](https://github.com/opencitations/ramose) diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/external-components/components/sparql-anything/command-line.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/external-components/components/sparql-anything/command-line.md new file mode 100644 index 00000000..a99ce484 --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/external-components/components/sparql-anything/command-line.md @@ -0,0 +1,114 @@ +--- +component-id: sparql-anything-cli +type: CLITool +name: SPARQL Anything Command Line +description: Command line executable of SPARQL Anything +logo: https://avatars.githubusercontent.com/u/79987779?s=200&v=4 +work-package: +- WP1 +- WP2 +- WP3 +- WP4 +pilot: +- MEETUPS +- CHILD +project: polifonia-project +resource: https://github.com/SPARQL-Anything/sparql.anything/releases +demo: https://github.com/SPARQL-Anything/showcase-tate +release-date: 2022/12/18 +release-number: v0.8.1 +release-link: https://github.com/SPARQL-Anything/sparql.anything/releases/tag/v0.8.1 +doi: 10.5281/zenodo.7454360 +changelog: 
https://github.com/SPARQL-Anything/sparql.anything/releases/tag/v0.8.1 +licence: +- Apache-2.0 +copyright: "Copyright (c) 2022 SPARQL Anything Contributors @ http://github.com/sparql-anything" +contributors: +- Luigi Asprino +- Enrico Daga +- Justin Dowdy +- Marco Ratta +related-components: +- informed-by: sparql-anything-requirements +- documentation: + - sparql-anything-docs + - sparql-anything-tutorials +- reuses: + - sparql-anything-java +--- + +# SPARQL Anything Command Line + +The command line interface of SPARQL Anything. +An executable JAR can be obtained from the [Releases](https://github.com/spice-h2020/sparql.anything/releases) page. + +The jar can be executed as follows: + +``` +usage: java -jar sparql.anything- -q query [-f ] [-v ... ] [-l path] [-o + filepath] + -q,--query The path to the file storing the + query to execute or the query + itself. + -o,--output OPTIONAL - The path to the output + file. [Default: STDOUT] + -e,--explain OPTIONAL - Explain query execution + -l,--load OPTIONAL - The path to one RDF file + or a folder including a set of + files to be loaded. When present, + the data is loaded in memory and + the query executed against it. + -f,--format OPTIONAL - Format of the output + file. Supported values: JSON, XML, + CSV, TEXT, TTL, NT, NQ. [Default: + TEXT or TTL] + -s,--strategy OPTIONAL - Strategy for query + evaluation. Possible values: '1' - + triple filtering (default), '0' - + triplify all data. The system + fallbacks to '0' when the strategy + is not implemented yet for the + given resource type. + -p,--output-pattern OPTIONAL - Output filename pattern, + e.g. 'myfile-?friendName.json'. + Variables should start with '?' and + refer to bindings from the input + file. This option can only be used + in combination with 'input' and is + ignored otherwise. This option + overrides 'output'. + -v,--values OPTIONAL - Values passed as input + parameter to a query template. 
When + present, the query is pre-processed + by substituting variable names with + the values provided. The argument + can be used in two ways. (1) + Providing a single SPARQL ResultSet + file. In this case, the query is + executed for each set of bindings + in the input result set. Only 1 + file is allowed. (2) Named variable + bindings: the argument value must + follow the syntax: + var_name=var_value. The argument + can be passed multiple times and + the query repeated for each set of + values. + -i,--input [Deprecated] OPTIONAL - The path to + a SPARQL result set file to be used + as input. When present, the query + is pre-processed by substituting + variable names with values from the + bindings provided. The query is + repeated for each set of bindings + in the input result set. +``` + +Logging can be configured adding the following option (SLF4J): +``` +-Dorg.slf4j.simpleLogger.defaultLogLevel=trace +``` + +For more information, please see the [online documentation](https://sparql-anything.readthedocs.io/en/latest/#command-line-interface-cli) + diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/external-components/components/sparql-anything/docker.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/external-components/components/sparql-anything/docker.md new file mode 100644 index 00000000..98cd9890 --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/external-components/components/sparql-anything/docker.md @@ -0,0 +1,55 @@ +--- +component-id: sparql-anything-docker +type: DockerImageContainer +name: SPARQL Anything Docker Instance +description: Docker file of the SPARQL Anything Web Server +logo: https://avatars.githubusercontent.com/u/79987779?s=200&v=4 +work-package: +- WP2 +- WP3 +- WP4 +pilot: +- MEETUPS +project: polifonia-project +resource: https://github.com/SPARQL-Anything/sparql.anything/blob/v0.8-DEV/Dockerfile.development +release-date: 2022/12/18 +release-number: v0.8.1 +release-link: 
https://github.com/SPARQL-Anything/sparql.anything/releases/tag/v0.8.1 +doi: 10.5281/zenodo.7454360 +changelog: https://github.com/SPARQL-Anything/sparql.anything/releases/tag/v0.8.1 +related-components: +- documentation: sparql-anything-docs +- reuses: + - sparql-anything-server + - sparql-anything-java +--- + +# SPARQL Anything Docker Image + +Instructions to build a docker image with a running Fuseki server. + +Create a Dockerfile as follows, see also [this file](https://github.com/SPARQL-Anything/sparql.anything/blob/v0.8-DEV/Dockerfile.development): + +``` +FROM mcr.microsoft.com/playwright/java:focal +# needed for the headless browser + +LABEL description="SPARQL Anything" + +RUN apt-get update && apt-get install -y maven +# Set the locale https://stackoverflow.com/questions/28405902/how-to-set-the-locale-inside-a-debian-ubuntu-docker-container +RUN apt-get install locales +RUN sed -i '/en_US.UTF-8/s/^# //g' /etc/locale.gen && \ + locale-gen +ENV LANG en_US.UTF-8 +ENV LANGUAGE en_US:en +ENV LC_ALL en_US.UTF-8 + +ENV VERSION "0.9.0-SNAPSHOT" + +# normal +CMD cd /app && mvn clean install && \ + java -cp \ + "/app/sparql-anything-fuseki/target/sparql-anything-server-${VERSION}.jar:$(for i in /app/*jar ; do printf '%s:' $i ; done)" \ + com.github.sparqlanything.fuseki.Endpoint +``` \ No newline at end of file diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/external-components/components/sparql-anything/documentation.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/external-components/components/sparql-anything/documentation.md new file mode 100644 index 00000000..5b0f55a0 --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/external-components/components/sparql-anything/documentation.md @@ -0,0 +1,27 @@ +--- +component-id: sparql-anything-docs +type: Documentation +name: SPARQL Anything Documentation +description: Online documentation of SPARQL Anything +logo: 
https://avatars.githubusercontent.com/u/79987779?s=200&v=4 +demo: https://github.com/SPARQL-Anything/showcase-tate +resource: https://sparql-anything.readthedocs.io/en/latest/ +related-components: +- informed-by: + - sparql-anything-requirements + - sparql-anything-cli + - sparql-anything-server +licence: +- Apache-2.0 +--- + +# SPARQL Anything Documentation + +The SPARQL Anything project contributors produced an extensive [documentation](https://sparql-anything.readthedocs.io/en/latest/BROWSER/), including: + +- The complete list of configuration parameters +- Description of SPARQL extension functions and magic properties +- Guides to use the command line interface or server +- Guides to configuration for specific input formats +- How to configure the HTTP client to query Web APIs +- Tutorials diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/external-components/components/sparql-anything/fuseki-server.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/external-components/components/sparql-anything/fuseki-server.md new file mode 100644 index 00000000..4e6d4fae --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/external-components/components/sparql-anything/fuseki-server.md @@ -0,0 +1,40 @@ +--- +component-id: sparql-anything-server +type: WebApplication +name: SPARQL Anything Web Server +description: Web server executable of SPARQL Anything +logo: https://avatars.githubusercontent.com/u/79987779?s=200&v=4 +demo: https://github.com/SPARQL-Anything/showcase-tate +resource: https://github.com/SPARQL-Anything/sparql.anything/releases +release-date: 2022/12/18 +release-number: v0.8.1 +release-link: https://github.com/SPARQL-Anything/sparql.anything/releases/tag/v0.8.1 +doi: 10.5281/zenodo.7454360 +changelog: https://github.com/SPARQL-Anything/sparql.anything/releases/tag/v0.8.1 +licence: +- Apache-2.0 +copyright: "Copyright (c) 2022 SPARQL Anything Contributors @ http://github.com/sparql-anything" 
+contributors: +- Luigi Asprino +- Enrico Daga +- Justin Dowdy +- Marco Ratta +related-components: +- documentation: + - sparql-anything-docs + - sparql-anything-tutorials +- reuses: + - sparql-anything-java + - "Apache Jena Fuseki https://jena.apache.org/" +- informed-by: + - sparql-anything-requirements +--- + +# SPARQL Anything Web Server + +SPARQL Anything is also released as a server, embedded into an instance of the Apache Jena Fuseki server. The server requires Java >= 11 to be installed in your operating system. Download the latest version of the SPARQL Anything server from the releases page. The command line is a file named sparql-anything-server-.jar. + +Run the server as follows: +``` +java -jar sparql-anything-server-.jar +``` \ No newline at end of file diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/external-components/components/sparql-anything/java-source-code.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/external-components/components/sparql-anything/java-source-code.md new file mode 100644 index 00000000..82b19fb1 --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/external-components/components/sparql-anything/java-source-code.md @@ -0,0 +1,37 @@ +--- +component-id: sparql-anything-java +type: Software +name: SPARQL Anything Source Code +description: Source code of SPARQL Anything +logo: https://avatars.githubusercontent.com/u/79987779?s=200&v=4 +demo: https://github.com/SPARQL-Anything/showcase-tate +resource: https://github.com/SPARQL-Anything/sparql.anything/releases +release-date: 2022/12/18 +release-number: v0.8.1 +release-link: https://github.com/SPARQL-Anything/sparql.anything/releases/tag/v0.8.1 +doi: 10.5281/zenodo.7454360 +changelog: https://github.com/SPARQL-Anything/sparql.anything/releases/tag/v0.8.1 +licence: +- Apache-2.0 +copyright: "Copyright (c) 2022 SPARQL Anything Contributors @ http://github.com/sparql-anything" +contributors: +- Luigi Asprino +- Enrico Daga +- 
Justin Dowdy +- Marco Ratta +related-components: +- informed-by: sparql-anything-requirements +- documentation: + - sparql-anything-docs + - sparql-anything-tutorials +- extends: + - "Apache Jena https://jena.apache.org/" +- reuses: + - "SPARQL 1.1 Query Language https://www.w3.org/TR/sparql11-query/" +--- + +# SPARQL Anything Source Code + +SPARQL Anything is developed in Java and it is built on top of [Apache Jena](https://jena.apache.org/), which implements the [SPARQL 1.1 W3C specification](https://www.w3.org/TR/sparql11-query/). + +More information can be found in the [Github repository](http://github.com/sparql-anything/sparql.anything) \ No newline at end of file diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/external-components/components/sparql-anything/python-library.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/external-components/components/sparql-anything/python-library.md new file mode 100644 index 00000000..e2930b3e --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/external-components/components/sparql-anything/python-library.md @@ -0,0 +1,23 @@ +--- +component-id: sparql-anything-python +type: SoftwareLibrary +name: PySPARQL-Anything +description: The PySPARQL-Anything software library. 
+logo: https://avatars.githubusercontent.com/u/79987779?s=200&v=4 +demo: http://not-exisats.com/ddd +resource: https://github.com/SPARQL-Anything/PySPARQL-Anything +release-date: 2023/01/31 +release-number: v0.8.1.1 +release-link: https://github.com/SPARQL-Anything/PySPARQL-Anything/releases/tag/v0.8.1.1 +licence: +- Apache-2.0 +copyright: "Copyright (c) 2022 SPARQL Anything Contributors @ http://github.com/sparql-anything" +contributors: +- Enrico Daga +- Marco Ratta +related-components: +- informed-by: sparql-anything-requirements +- documentation: sparql-anything-docs +- reuses: + - sparql-anything-java +--- diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/external-components/components/sparql-anything/requirements.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/external-components/components/sparql-anything/requirements.md new file mode 100644 index 00000000..9717db25 --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/external-components/components/sparql-anything/requirements.md @@ -0,0 +1,40 @@ +--- +component-id: sparql-anything-requirements +type: RequirementsCollection +name: "Requirements of SPARQL Anything" +description: Requirements collection of SPARQL Anything +logo: https://avatars.githubusercontent.com/u/79987779?s=200&v=4 +demo: https://github.com/SPARQL-Anything/showcase-tate +bibliography: + - main-publication: "Daga, Enrico, Luigi Asprino, Paul Mulholland, and Aldo Gangemi. \"Facade-X: an opinionated approach to SPARQL anything.\" Studies on the Semantic Web 53 (2021): 58-73." +--- + +# Requirements of SPARQL Anything + +The motivation for researching novel ways to transform non-RDF resources into RDF comes from the scenarios under development in two EU H2020 projects [SPICE](http://spice-h2020.eu) and [Polifonia](http://polifonia-project.eu). 
+ +In Polifonia, a consortium collaborate in developing novel ways for valorising musical cultural heritage, relying on a *linked data* network of resources from many different stakeholders in the cultural sector. +However, the majority of resources involved are not exposed as Linked Data but are released, for example, as CSV, XML, JSON files, or combinations of these formats. + +It is clear how the effort required for transforming resources could constitute a significant cost to the project. +In the absence of a strategy to cope with this diversity, content transformation may result in duplication of effort and become a serious bottleneck. + +The following Table provides a summary of the design requirements of SPARQL Anything. + +| Requirement | Description | +| :---------- | :---------- | +| Transform | Transform several sources having heterogeneous formats | +| Query | Query resources having heterogeneous formats | +| Binary|Support the transformation of binary formats| +| Embed|Support the embedding of content in RDF| +| Metadata|Support the extraction of metadata embedded in files| +| Low learning demands|Minimise the tools and languages that need to be learned| +| Low complexity|Minimise complexity of the queries| +| Meaningful abstraction|Enable focus on data structures rather than implementation details| +| Explorability|Enable data exploration without premature commitment to a mapping, in the absence of a domain ontology.| +| Workflow|Integrate with a typical Semantic Web engineering workflow| +| Adaptable|Be generic but flexible and adaptable| +| Sustainable|Inform into a software that is easy to implement, maintain, and does not have evident efficiency drawbacks| +| Extendable|Support the addition of an open set of formats| + + diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/external-components/components/sparql-anything/sparql-anything.md 
b/content/_spice-h2020/ecosystem/content/_polifonia-project/external-components/components/sparql-anything/sparql-anything.md new file mode 100644 index 00000000..594fc147 --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/external-components/components/sparql-anything/sparql-anything.md @@ -0,0 +1,53 @@ +--- +container-id: sparql-anything +name: SPARQL Anything +description: SPARQL Anything is a system for Semantic Web re-engineering that allows to query non-RDF files as-if they are in RDF. +type: Project +work-package: +- WP1 +- WP2 +- WP4 +pilot: +- MEETUPS +- CHILD +project: polifonia-project +bibliography: +- main-publication: "Asprino, Luigi, Enrico Daga, Aldo Gangemi, and Paul Mulholland. \"Knowledge Graph Construction with a façade: a unified method to access heterogeneous data sources on the Web.\" ACM Transactions on Internet Technology 23, no. 1 (2023): 1-31. https://dl.acm.org/doi/pdf/10.1145/3555312" +- publication: + - "Daga, Enrico, Luigi Asprino, Paul Mulholland, and Aldo Gangemi. \"Facade-X: an opinionated approach to SPARQL anything.\" Studies on the Semantic Web 53 (2021): 58-73." + - "Ratta, Marco, and Enrico Daga. \"Knowledge Graph Construction From MusicXML\": An Empirical Investigation With SPARQL Anything. http://oro.open.ac.uk/85326/1/Music_Knowledge_Graphs_Paper%20%281%29.pdf" +funder: + - name: European Commission H2020 + url: https://ec.europa.eu/info/funding-tenders/opportunities/portal/screen/programmes/h2020 + grant-agreement: "GA101004746" + - name: European Commission H2020 + url: https://ec.europa.eu/info/funding-tenders/opportunities/portal/screen/programmes/h2020 + grant-agreement: "GA870811" +credits: "This project has received funding from the European Union’s Horizon 2020 research and innovation programme under grant agreements GA101004746 (Polifonia) and GA870811 (SPICE)." 
+has-part: + - sparql-anything-cli + - sparql-anything-java + - sparql-anything-docs + - sparql-anything-tutorials + - sparql-anything-python + - sparql-anything-requirements + - sparql-anything-docker +--- + +# SPARQL Anything + +[SPARQL Anything](http://sparql-anything.eu) is a system for Semantic Web re-engineering that allows users to ... query anything with SPARQL. + +Main features: +- Query files in plain SPARQL 1.1, via the `SERVICE ` (see [configuration](https://sparql-anything.readthedocs.io/en/latest/#configuration)) and build knowledge graphs with `CONSTRUCT` queries +- [Supported formats](https://sparql-anything.readthedocs.io/en/latest/#supported-formats): XML, JSON, CSV, HTML, Excel, Text, Binary, EXIF, File System, Zip/Tar, Markdown, YAML, Bibtex, DOCx (see [configuration](#format-specific-options)) +- Transforms [files, inline content, or the output of an external command](https://sparql-anything.readthedocs.io/en/latest/#general-purpose-options) +- Full fledged [HTTP client](#http-options) to query Web APIs (headers, authentication, all methods supported) +- [Functions library](https://sparql-anything.readthedocs.io/en/latest/#functions-and-magic-properties) for RDF sequences, strings, hashes, easy entity building, ... 
+- Combine multiple SERVICE clauses into complex data integration queries (thanks to SPARQL) +- Query templates (using [BASIL variables](https://sparql-anything.readthedocs.io/en/latest/#query-templates-and-variable-bindings)) +- Save and reuse SPARQL `Results Sets` as input for [parametric queries](https://sparql-anything.readthedocs.io/en/latest/#query-templates-and-variable-bindings) +- Slice large CSV files with an iterator-like execution style (soon [JSON](https://github.com/SPARQL-Anything/sparql.anything/issues/202) and [XML](https://github.com/SPARQL-Anything/sparql.anything/issues/203)) +- Supports an [on-disk option](https://sparql-anything.readthedocs.io/en/latest/#general-purpose-options) (with Apache Jena TDB2) + +SPARQL Anything is developed in collaboration with the EU-funded project [SPICE](http://spice-h2020.eu). diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/external-components/components/sparql-anything/tutorials.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/external-components/components/sparql-anything/tutorials.md new file mode 100644 index 00000000..172c207c --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/external-components/components/sparql-anything/tutorials.md @@ -0,0 +1,33 @@ +--- +component-id: sparql-anything-tutorials +type: Tutorial +name: "SPARQL Anything: Tutorials" +description: a collection of tutorials to introduce users to SPARQL Anything +logo: https://avatars.githubusercontent.com/u/79987779?s=200&v=4 +demo: https://github.com/SPARQL-Anything/showcase-tate +resource: https://sparql-anything.readthedocs.io/en/latest/TUTORIALS/ +licence: +- Apache-2.0 +copyright: "Copyright (c) 2022 SPARQL Anything Contributors @ http://github.com/sparql-anything" +contributors: +- Luigi Asprino +- Enrico Daga +- Justin Dowdy +- Marco Ratta +related-components: +- informed-by: + - sparql-anything-cli + - sparql-anything-server + - sparql-anything-python +--- + +# SPARQL 
Anything: Tutorials + +Here is a list of step-by-step tutorials covering several features of SPARQL Anything: + +- [A Gentle introduction to SPARQL Anything](A_GENTLE_INTRODUCTION_TO_SPARQL_ANYTHING.md): this tutorial presents the Facade-X model and shows basic transformations of JSON, CSV and XML. [Video](https://www.dropbox.com/s/bc31v0klg68op0z/SPARQLAnythingTutorial-highres.mp4?dl=0) +- [SPARQL Anything showcase: open data from the Tate Gallery](https://github.com/SPARQL-Anything/showcase-tate): this tutorial covers formats such as CSV and JSON and features such as the function `fx:anySlot` and the chaining of multiple `SERVICE` clauses. It is based on the SPARQL Anything [CLI](README.md#Usage). +- [Construct a KG of artists and artworks of the IMMA museum website](https://github.com/SPARQL-Anything/showcase-imma): This showcase demonstrates the use of SPARQL Anything for constructing a Knowledge Graph from data encoded in HTML pages. Apart from examples with the HTML input format, it covers features such as parametrised queries and the use of SPARQL result set files as parameters. It is based on the SPARQL Anything [CLI](README.md#Usage). +- [Construct a KG from the Propbank dataset](https://github.com/SPARQL-Anything/showcase-propbank): An advanced example of transformation of XML data, including querying a Zip archive. +- [Construct a KG from YAML annotations in Markdown file headers](https://github.com/SPARQL-Anything/showcase-polifonia-ecosystem): A short but complex case demonstrating how to chain multiple transformations starting from a set of Markdown files, queried to extract the YAML header, which is in turn queried to derive the annotations, that are in turn projected into a KG! +- [Populate a Music Ontology from MusicXML files](https://github.com/SPARQL-Anything/showcase-musicxml): An advanced application of SPARQL Anything to query MusicXML files and derive note sequences, computing n-grams, and populating a Music Notation ontology. 
\ No newline at end of file diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/folk_ngram_analysis/FoNN/LICENSE.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/folk_ngram_analysis/FoNN/LICENSE.md new file mode 100644 index 00000000..10201337 --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/folk_ngram_analysis/FoNN/LICENSE.md @@ -0,0 +1,395 @@ +Attribution 4.0 International + +======================================================================= + +Creative Commons Corporation ("Creative Commons") is not a law firm and +does not provide legal services or legal advice. Distribution of +Creative Commons public licenses does not create a lawyer-client or +other relationship. Creative Commons makes its licenses and related +information available on an "as-is" basis. Creative Commons gives no +warranties regarding its licenses, any material licensed under their +terms and conditions, or any related information. Creative Commons +disclaims all liability for damages resulting from their use to the +fullest extent possible. + +Using Creative Commons Public Licenses + +Creative Commons public licenses provide a standard set of terms and +conditions that creators and other rights holders may use to share +original works of authorship and other material subject to copyright +and certain other rights specified in the public license below. The +following considerations are for informational purposes only, are not +exhaustive, and do not form part of our licenses. + + Considerations for licensors: Our public licenses are + intended for use by those authorized to give the public + permission to use material in ways otherwise restricted by + copyright and certain other rights. Our licenses are + irrevocable. Licensors should read and understand the terms + and conditions of the license they choose before applying it. 
+ Licensors should also secure all rights necessary before + applying our licenses so that the public can reuse the + material as expected. Licensors should clearly mark any + material not subject to the license. This includes other CC- + licensed material, or material used under an exception or + limitation to copyright. More considerations for licensors: + wiki.creativecommons.org/Considerations_for_licensors + + Considerations for the public: By using one of our public + licenses, a licensor grants the public permission to use the + licensed material under specified terms and conditions. If + the licensor's permission is not necessary for any reason--for + example, because of any applicable exception or limitation to + copyright--then that use is not regulated by the license. Our + licenses grant only permissions under copyright and certain + other rights that a licensor has authority to grant. Use of + the licensed material may still be restricted for other + reasons, including because others have copyright or other + rights in the material. A licensor may make special requests, + such as asking that all changes be marked or described. + Although not required by our licenses, you are encouraged to + respect those requests where reasonable. More considerations + for the public: + wiki.creativecommons.org/Considerations_for_licensees + +======================================================================= + +Creative Commons Attribution 4.0 International Public License + +By exercising the Licensed Rights (defined below), You accept and agree +to be bound by the terms and conditions of this Creative Commons +Attribution 4.0 International Public License ("Public License"). 
To the +extent this Public License may be interpreted as a contract, You are +granted the Licensed Rights in consideration of Your acceptance of +these terms and conditions, and the Licensor grants You such rights in +consideration of benefits the Licensor receives from making the +Licensed Material available under these terms and conditions. + + +Section 1 -- Definitions. + + a. Adapted Material means material subject to Copyright and Similar + Rights that is derived from or based upon the Licensed Material + and in which the Licensed Material is translated, altered, + arranged, transformed, or otherwise modified in a manner requiring + permission under the Copyright and Similar Rights held by the + Licensor. For purposes of this Public License, where the Licensed + Material is a musical work, performance, or sound recording, + Adapted Material is always produced where the Licensed Material is + synched in timed relation with a moving image. + + b. Adapter's License means the license You apply to Your Copyright + and Similar Rights in Your contributions to Adapted Material in + accordance with the terms and conditions of this Public License. + + c. Copyright and Similar Rights means copyright and/or similar rights + closely related to copyright including, without limitation, + performance, broadcast, sound recording, and Sui Generis Database + Rights, without regard to how the rights are labeled or + categorized. For purposes of this Public License, the rights + specified in Section 2(b)(1)-(2) are not Copyright and Similar + Rights. + + d. Effective Technological Measures means those measures that, in the + absence of proper authority, may not be circumvented under laws + fulfilling obligations under Article 11 of the WIPO Copyright + Treaty adopted on December 20, 1996, and/or similar international + agreements. + + e. 
Exceptions and Limitations means fair use, fair dealing, and/or + any other exception or limitation to Copyright and Similar Rights + that applies to Your use of the Licensed Material. + + f. Licensed Material means the artistic or literary work, database, + or other material to which the Licensor applied this Public + License. + + g. Licensed Rights means the rights granted to You subject to the + terms and conditions of this Public License, which are limited to + all Copyright and Similar Rights that apply to Your use of the + Licensed Material and that the Licensor has authority to license. + + h. Licensor means the individual(s) or entity(ies) granting rights + under this Public License. + + i. Share means to provide material to the public by any means or + process that requires permission under the Licensed Rights, such + as reproduction, public display, public performance, distribution, + dissemination, communication, or importation, and to make material + available to the public including in ways that members of the + public may access the material from a place and at a time + individually chosen by them. + + j. Sui Generis Database Rights means rights other than copyright + resulting from Directive 96/9/EC of the European Parliament and of + the Council of 11 March 1996 on the legal protection of databases, + as amended and/or succeeded, as well as other essentially + equivalent rights anywhere in the world. + + k. You means the individual or entity exercising the Licensed Rights + under this Public License. Your has a corresponding meaning. + + +Section 2 -- Scope. + + a. License grant. + + 1. Subject to the terms and conditions of this Public License, + the Licensor hereby grants You a worldwide, royalty-free, + non-sublicensable, non-exclusive, irrevocable license to + exercise the Licensed Rights in the Licensed Material to: + + a. reproduce and Share the Licensed Material, in whole or + in part; and + + b. 
produce, reproduce, and Share Adapted Material. + + 2. Exceptions and Limitations. For the avoidance of doubt, where + Exceptions and Limitations apply to Your use, this Public + License does not apply, and You do not need to comply with + its terms and conditions. + + 3. Term. The term of this Public License is specified in Section + 6(a). + + 4. Media and formats; technical modifications allowed. The + Licensor authorizes You to exercise the Licensed Rights in + all media and formats whether now known or hereafter created, + and to make technical modifications necessary to do so. The + Licensor waives and/or agrees not to assert any right or + authority to forbid You from making technical modifications + necessary to exercise the Licensed Rights, including + technical modifications necessary to circumvent Effective + Technological Measures. For purposes of this Public License, + simply making modifications authorized by this Section 2(a) + (4) never produces Adapted Material. + + 5. Downstream recipients. + + a. Offer from the Licensor -- Licensed Material. Every + recipient of the Licensed Material automatically + receives an offer from the Licensor to exercise the + Licensed Rights under the terms and conditions of this + Public License. + + b. No downstream restrictions. You may not offer or impose + any additional or different terms or conditions on, or + apply any Effective Technological Measures to, the + Licensed Material if doing so restricts exercise of the + Licensed Rights by any recipient of the Licensed + Material. + + 6. No endorsement. Nothing in this Public License constitutes or + may be construed as permission to assert or imply that You + are, or that Your use of the Licensed Material is, connected + with, or sponsored, endorsed, or granted official status by, + the Licensor or others designated to receive attribution as + provided in Section 3(a)(1)(A)(i). + + b. Other rights. + + 1. 
Moral rights, such as the right of integrity, are not + licensed under this Public License, nor are publicity, + privacy, and/or other similar personality rights; however, to + the extent possible, the Licensor waives and/or agrees not to + assert any such rights held by the Licensor to the limited + extent necessary to allow You to exercise the Licensed + Rights, but not otherwise. + + 2. Patent and trademark rights are not licensed under this + Public License. + + 3. To the extent possible, the Licensor waives any right to + collect royalties from You for the exercise of the Licensed + Rights, whether directly or through a collecting society + under any voluntary or waivable statutory or compulsory + licensing scheme. In all other cases the Licensor expressly + reserves any right to collect such royalties. + + +Section 3 -- License Conditions. + +Your exercise of the Licensed Rights is expressly made subject to the +following conditions. + + a. Attribution. + + 1. If You Share the Licensed Material (including in modified + form), You must: + + a. retain the following if it is supplied by the Licensor + with the Licensed Material: + + i. identification of the creator(s) of the Licensed + Material and any others designated to receive + attribution, in any reasonable manner requested by + the Licensor (including by pseudonym if + designated); + + ii. a copyright notice; + + iii. a notice that refers to this Public License; + + iv. a notice that refers to the disclaimer of + warranties; + + v. a URI or hyperlink to the Licensed Material to the + extent reasonably practicable; + + b. indicate if You modified the Licensed Material and + retain an indication of any previous modifications; and + + c. indicate the Licensed Material is licensed under this + Public License, and include the text of, or the URI or + hyperlink to, this Public License. + + 2. 
You may satisfy the conditions in Section 3(a)(1) in any + reasonable manner based on the medium, means, and context in + which You Share the Licensed Material. For example, it may be + reasonable to satisfy the conditions by providing a URI or + hyperlink to a resource that includes the required + information. + + 3. If requested by the Licensor, You must remove any of the + information required by Section 3(a)(1)(A) to the extent + reasonably practicable. + + 4. If You Share Adapted Material You produce, the Adapter's + License You apply must not prevent recipients of the Adapted + Material from complying with this Public License. + + +Section 4 -- Sui Generis Database Rights. + +Where the Licensed Rights include Sui Generis Database Rights that +apply to Your use of the Licensed Material: + + a. for the avoidance of doubt, Section 2(a)(1) grants You the right + to extract, reuse, reproduce, and Share all or a substantial + portion of the contents of the database; + + b. if You include all or a substantial portion of the database + contents in a database in which You have Sui Generis Database + Rights, then the database in which You have Sui Generis Database + Rights (but not its individual contents) is Adapted Material; and + + c. You must comply with the conditions in Section 3(a) if You Share + all or a substantial portion of the contents of the database. + +For the avoidance of doubt, this Section 4 supplements and does not +replace Your obligations under this Public License where the Licensed +Rights include other Copyright and Similar Rights. + + +Section 5 -- Disclaimer of Warranties and Limitation of Liability. + + a. UNLESS OTHERWISE SEPARATELY UNDERTAKEN BY THE LICENSOR, TO THE + EXTENT POSSIBLE, THE LICENSOR OFFERS THE LICENSED MATERIAL AS-IS + AND AS-AVAILABLE, AND MAKES NO REPRESENTATIONS OR WARRANTIES OF + ANY KIND CONCERNING THE LICENSED MATERIAL, WHETHER EXPRESS, + IMPLIED, STATUTORY, OR OTHER. 
THIS INCLUDES, WITHOUT LIMITATION, + WARRANTIES OF TITLE, MERCHANTABILITY, FITNESS FOR A PARTICULAR + PURPOSE, NON-INFRINGEMENT, ABSENCE OF LATENT OR OTHER DEFECTS, + ACCURACY, OR THE PRESENCE OR ABSENCE OF ERRORS, WHETHER OR NOT + KNOWN OR DISCOVERABLE. WHERE DISCLAIMERS OF WARRANTIES ARE NOT + ALLOWED IN FULL OR IN PART, THIS DISCLAIMER MAY NOT APPLY TO YOU. + + b. TO THE EXTENT POSSIBLE, IN NO EVENT WILL THE LICENSOR BE LIABLE + TO YOU ON ANY LEGAL THEORY (INCLUDING, WITHOUT LIMITATION, + NEGLIGENCE) OR OTHERWISE FOR ANY DIRECT, SPECIAL, INDIRECT, + INCIDENTAL, CONSEQUENTIAL, PUNITIVE, EXEMPLARY, OR OTHER LOSSES, + COSTS, EXPENSES, OR DAMAGES ARISING OUT OF THIS PUBLIC LICENSE OR + USE OF THE LICENSED MATERIAL, EVEN IF THE LICENSOR HAS BEEN + ADVISED OF THE POSSIBILITY OF SUCH LOSSES, COSTS, EXPENSES, OR + DAMAGES. WHERE A LIMITATION OF LIABILITY IS NOT ALLOWED IN FULL OR + IN PART, THIS LIMITATION MAY NOT APPLY TO YOU. + + c. The disclaimer of warranties and limitation of liability provided + above shall be interpreted in a manner that, to the extent + possible, most closely approximates an absolute disclaimer and + waiver of all liability. + + +Section 6 -- Term and Termination. + + a. This Public License applies for the term of the Copyright and + Similar Rights licensed here. However, if You fail to comply with + this Public License, then Your rights under this Public License + terminate automatically. + + b. Where Your right to use the Licensed Material has terminated under + Section 6(a), it reinstates: + + 1. automatically as of the date the violation is cured, provided + it is cured within 30 days of Your discovery of the + violation; or + + 2. upon express reinstatement by the Licensor. + + For the avoidance of doubt, this Section 6(b) does not affect any + right the Licensor may have to seek remedies for Your violations + of this Public License. + + c. 
For the avoidance of doubt, the Licensor may also offer the + Licensed Material under separate terms or conditions or stop + distributing the Licensed Material at any time; however, doing so + will not terminate this Public License. + + d. Sections 1, 5, 6, 7, and 8 survive termination of this Public + License. + + +Section 7 -- Other Terms and Conditions. + + a. The Licensor shall not be bound by any additional or different + terms or conditions communicated by You unless expressly agreed. + + b. Any arrangements, understandings, or agreements regarding the + Licensed Material not stated herein are separate from and + independent of the terms and conditions of this Public License. + + +Section 8 -- Interpretation. + + a. For the avoidance of doubt, this Public License does not, and + shall not be interpreted to, reduce, limit, restrict, or impose + conditions on any use of the Licensed Material that could lawfully + be made without permission under this Public License. + + b. To the extent possible, if any provision of this Public License is + deemed unenforceable, it shall be automatically reformed to the + minimum extent necessary to make it enforceable. If the provision + cannot be reformed, it shall be severed from this Public License + without affecting the enforceability of the remaining terms and + conditions. + + c. No term or condition of this Public License will be waived and no + failure to comply consented to unless expressly agreed to by the + Licensor. + + d. Nothing in this Public License constitutes or may be interpreted + as a limitation upon, or waiver of, any privileges and immunities + that apply to the Licensor or You, including from the legal + processes of any jurisdiction or authority. + + +======================================================================= + +Creative Commons is not a party to its public +licenses. 
Notwithstanding, Creative Commons may elect to apply one of +its public licenses to material it publishes and in those instances +will be considered the “Licensor.” The text of the Creative Commons +public licenses is dedicated to the public domain under the CC0 Public +Domain Dedication. Except for the limited purpose of indicating that +material is shared under a Creative Commons public license or as +otherwise permitted by the Creative Commons policies published at +creativecommons.org/policies, Creative Commons does not authorize the +use of the trademark "Creative Commons" or any other trademark or logo +of Creative Commons without its prior written consent including, +without limitation, in connection with any unauthorized modifications +to any of its public licenses or any other arrangements, +understandings, or agreements concerning use of licensed material. For +the avoidance of doubt, this paragraph does not form part of the +public licenses. + +Creative Commons may be contacted at creativecommons.org. \ No newline at end of file diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/folk_ngram_analysis/FoNN/mtc_ann_corpus/notes.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/folk_ngram_analysis/FoNN/mtc_ann_corpus/notes.md new file mode 100644 index 00000000..20271042 --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/folk_ngram_analysis/FoNN/mtc_ann_corpus/notes.md @@ -0,0 +1,12 @@ +**MTC-ANN corpus notes** + +Subdirectory '''../mtc_ann_corpus/midi''' contains a copy of the Meertens Tune Collection Annotated Corpus (MTC-ANN) +v2.0.1 in MIDI and **kern formats, downloaded from www.liederenbank.nl/mtc on 29 November 2021. +All other data in '''../mtc_ann_corpus''' dir is derived from this original input. + +Feature sequence data extracted at note-level and (duration-weighted) note-level. +Diatonic scale degree patterns extracted for 3 <= n <= 12 at (duration-weighted) note-level. 
+ +KG data processed per: +Patterns and pattern locations extracted for 4 <= n <= 6 at (duration-weighted) note-level. +'Pattern corpus' filtered to include only patterns occurring at least twice in the corpus. \ No newline at end of file diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/folk_ngram_analysis/FoNN/thesession_corpus/notes.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/folk_ngram_analysis/FoNN/thesession_corpus/notes.md new file mode 100644 index 00000000..5ace7e3a --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/folk_ngram_analysis/FoNN/thesession_corpus/notes.md @@ -0,0 +1,12 @@ +**The Session corpus notes** + +Subdirectory '''../thesession_corpus/abc''' contains a data dump of The Session corpus (www.thesession.org) +in ABC Notation format, which was downloaded from https://github.com/adactio/TheSession-data on 13 December 2021. +All other data in '''../thesession_corpus''' dir is derived from this original input. + +ABC --> MIDI preprocessing via FoNN.abc_ingest.py +Feature sequence data extracted at note-level, (duration-weighted) note-level and accent-level. +Patterns extracted for 3 <= n <= 12 at both levels. + +NOTE: Due to corpus size, it was not possible to push the outputs above via Git. They are available on request of the +authors. Currently, only the ABC Notation and MIDI inputs are provided in the remote version of the corpus. 
\ No newline at end of file diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/folk_ngram_analysis/README.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/folk_ngram_analysis/README.md new file mode 100644 index 00000000..1acd3a2a --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/folk_ngram_analysis/README.md @@ -0,0 +1,184 @@ +--- +component-id: folk_ngram_analysis +name: FoNN -- Folk N-gram aNalysis +description: data ingest pipeline and musical similarity tools for symbolic music data, with two test symbolic music corpora. +type: Repository +release-date: 16/06/2023 +release-number: v1.0 +work-package: +- WP3 +licence: +- CC_BY_v4 +links: +- https://github.com/polifonia-project/folk_ngram_analysis +- https://zenodo.org/record/5768216#.YbEAbS2Q3T8 +credits: +- https://github.com/danDiamo +- https://github.com/ashahidkhattak +- https://github.com/jmmcd +--- + + + +# FONN - FOlk _N_-gram aNalysis + +- Targeting the goals of [Polifonia](https://polifonia-project.eu) WP3, FoNN contains tools to extract feature sequence data, extract musical patterns, and detect similarity within a symbolic music corpus. Although some of FoNN's functionality is tailored to Western European folk music, the software can be used on any corpus in a compatible symbolic representation format (MIDI, ABC Notation, **kern, MusicXML, or any other format compatible with the [music21](http://web.mit.edu/music21/) Python library). + +The repo contains a fully functional work-in-progress version of the software, along with two test music datasets: the Meertens Annotated Corpus (MTC-ANN) of Dutch folk songs and The Session corpus of Irish traditional folk dance tunes. +Three demo notebooks are supplied in ```./FoNN/demo_notebooks``` directory. 
These demos illustrate FoNN's feature extraction, pattern extraction, and similarity search tools as applied to the MTC-ANN corpus: + +- [feature_extraction_demo.ipynb](https://github.com/polifonia-project/folk_ngram_analysis/blob/master/demo_notebooks/feature_extraction_demo.ipynb): Reads the symbolic MTC-ANN music corpus from **kern format; extracts feature sequence data and writes to csv at ```./FoNN/mtc_ann_corpus/feature_sequence_data``` +- [pattern_extraction_demo.ipynb](https://github.com/polifonia-project/folk_ngram_analysis/blob/master/demo_notebooks/pattern_extraction_demo.ipynb): Reads the feature sequence data outputted by the above notebook, extracts unique feature patterns and counts their occurrences per tune across the corpus. Writes output to ```./FoNN/mtc_ann_corpus/pattern_corpus```. +- [similarity_search_demo.ipynb](https://github.com/polifonia-project/folk_ngram_analysis/blob/master/demo_notebooks/similarity_search_demo.ipynb): Using the data outputted by the notebook above, for a user-selectable query tune, this demo detects similar tunes across the corpus via FoNN's three novel pattern-based similarity metrics. Writes output to ```./FoNN/mtc_ann_corpus/similarity_results``` + +The repo also contains a data extraction and processing pipeline to generate inputs for Polifonia [Patterns Knowledge Graph (KG)](https://github.com/polifonia-project/patterns-knowledge-graph). Two Jupyter notebooks which run this pipeline are stored in ```./FoNN/pattern_knowledge_graph_pipeline``` directory: +- ```./FoNN/pattern_knowledge_graph_pipeline/patterns_kg_data_extraction.ipynb``` runs FoNN's pattern extraction tools to extract corpus data. +- ```./FoNN/pattern_knowledge_graph_pipeline/patterns_kg_data_processing.ipynb``` combines pattern data, feature sequence data and descriptors for each tune in the corpus and writes this data to a corpus-level Pickle file matching the Patterns KG input requirements. 
+ + +NOTE: Deliverable 3.4 of the Polifonia project describes the context and research informing development of these tools. It will be published on [Cordis](https://cordis.europa.eu/project/id/101004746/it) later this year (2023). + + +## FoNN -- Polifonia components: + +1. **FoNN - FOlk _N_-gram aNalysis** + * 1.1. Tools for extraction of feature sequence data from symbolic music document files: [feature_sequence_extraction_tools.py](https://github.com/polifonia-project/folk_ngram_analysis/blob/master/FoNN/feature_sequence_extraction_tools.py). + * 1.2. Tools to extract and count occurrences of unique local patterns from the feature sequence data: [pattern_extraction.py](https://github.com/polifonia-project/folk_ngram_analysis/blob/master/FoNN/pattern_extraction.py). + * 1.3. Tools to explore pattern-based similarity between tunes within a corpus: [similarity_search.py](https://github.com/polifonia-project/folk_ngram_analysis/blob/master/FoNN/similarity_search.py) + * 1.4 Copies of two test music datasets: + - [The Meertens Tune Collection Annotated Corpus](https://www.liederenbank.nl/mtc/) + - [The Session](https://thesession.org) + * 1.5 Patterns KG data extraction and processing pipeline: + [patterns_knowledge_graph_pipeline](https://github.com/polifonia-project/folk_ngram_analysis/blob/master/FoNN/patterns_knowledge_graph_pipeline) + + +2. **Ceol Rince na hÉireann (CRÉ) corpus** + * 2.1. For the associated *Ceol Rince na hÉireann* corpus of 1,195 monophonic Irish traditional dance tunes in ABC and MIDI formats, please see: [./cre_corpus/readme.md](https://github.com/polifonia-project/folk_ngram_analysis/blob/master/cre_corpus/readme.md). +3. **Root Note Detection** + * 3.1. 
Work-in-progress on automatic detection of musical root for each tune in the corpus, please see: [./root_note_detection/README.md](https://github.com/polifonia-project/folk_ngram_analysis/blob/master/root_note_detection/README.md) + + +## FoNN - Requirements + +To ensure FONN runs correctly, please navigate to local repo root directory and run the following in Terminal: + +``` pip install -r requirements.txt ``` + + +## FoNN - preprocessing step for ABC corpora + +NOTE: *The Session* and *CRÉ* corpora are provided in both ABC Notation and MIDI formats. + +- To ingest a corpus in ABC Notation format, first install the ABC2MIDI external dependency, which can be downloaded directly [here](https://ifdo.ca/~seymour/runabc/abcMIDI-2022.06.14.zip). For information on ABC2MIDI, please see the project [documentation](https://abcmidi.sourceforge.io). + +- If ingesting a corpus in ABC Notation, first convert to MIDI by running the ```./FoNN/abc_ingest.py``` script. This preliminary step uses ABC2MIDI to encode a specific 'beat stress model' into the MIDI output, which is used later in the workflow to filter data for rhythmically-accented notes. Such higher-level representation of melody is of particular interest in the study of Irish and related European & North American folk musics. + +- The workflow from here onwards is the same for corpora originating in all formats: if a corpus does not originate in ABC Notation, please skip to section 1.1. + + +## 1. FoNN - FOlk _N_-gram aNalysis: running the tools + +1.1. **Reading a corpus** + +- Running ```./FoNN/demo_notebooks/feature_extraction_tools_demo.ipynb``` extracts feature sequence data from an input corpus via ```FoNN.feature_sequence_extraction_tools.Corpus``` class. +- By default this notebook reads the corpus at ```./FoNN/mtc_ann_corpus/krn``` and outputs feature sequence data to ```./FoNN/mtc_ann_corpus/feature_sequence_data```. 
+- Input path and format can be edited as desired, while output will always write to ```./FoNN/[corpus name]/feature_sequence_data``` subdirectory. +- This notebook extracts 16 feature sequences in what we term note-level, duration-weighted note-level, and accent-level representations, as explained below: +1. Note-level: for every music score document in the corpus, each note is represented via 16 feature values. +2. Duration-weighted note-level: for every music score document in the corpus, each 1/8 note temporal increment is represented via 16 feature values. +3. Accent-level: for every music score document in the corpus, accented on-the-beat notes are represented via 16 feature values while other less metrically significant notes are dropped. +- Throughout the FoNN toolkit, these levels of data granularity are specified via the following string names: +1. note-level: 'note' +2. duration-weighted note-level: 'duration_weighted' +3. accent-level: 'accent' +- The 16 musical features extracted for each note are: +``` +-- 'midi_note_num': Chromatic pitch represented as MIDI number +-- 'onset': note onset (1/8 notes) +-- 'duration': note duration (1/8 notes) +-- 'velocity': MIDI velocity +-- 'diatonic_note_num': Diatonic pitch +-- 'beat_strength' -- music21 beatStrength attribute +-- 'chromatic_pitch_class': Pitch normalised to a single octave, represented as an integer between 0-11. +-- 'bar_num': Bar number +-- 'relative_chromatic_pitch': Chromatic pitch relative to the root note or tonal centre of the input sequence. +-- 'relative_diatonic_pitch': Diatonic pitch relative to the root note or tonal centre of the input sequence. +-- 'chromatic_scale_degree': Chromatic pitch class relative to the root note or tonal centre of the input sequence. +-- 'diatonic_scale_degree': Diatonic pitch class relative to the root note or tonal centre of the input sequence. 
+-- 'chromatic_interval': Change in chromatic pitch between two successive notes in the input sequence +-- 'diatonic_interval': Change in diatonic pitch between two successive notes in the input sequence +-- 'parsons_code': simple melodic contour. Please see Tune.extract_parsons_codes() docstring for detailed explanation. +-- 'parsons_cumsum': cumulative Parsons code values. +``` +- For more detail on how to customise inclusion/exclusion of features and choice of output data level, please refer to ```./FoNN/demo_notebooks/feature_extraction_tools_demo.ipynb```. + + +1.2. **Extracting patterns and counting their occurrences:** + +- For a user-selected musical feature, ```/FoNN/demo_notebooks/pattern_extraction_demo.ipynb``` uses FoNN.pattern_extraction.NgramPatternCorpus class to extract all unique *n*-gram patterns from the input corpus. The default feature is 'diatonic_pitch_class' but other features can be selected by the user from the list above in Section 1.1. +- Default input data is the MTC-ANN corpus feature sequence data at ```./FoNN/mtc_ann_corpus/feature_sequence_data```, but pattern extraction can be applied to any other symbolic corpus which has first been processed via FoNN's feature extraction pipeline as described in section 1.1. +- A pattern is defined as a subsequence of length between 3 and 12 elements which occurs at least once in the corpus. All patterns following this definition which occur in the corpus are stored in an array. Their occurrences in every tune in the corpus are counted and stored in a sparse matrix. These counts are weighted and converted to TF-IDF values to supress frequent-but-insignificant 'stop word' patterns. These outputs are the core input requirements for FoNN's similarity search tool; they are stored in ```./FoNN/[corpus name]/pattern_corpus``` dir. + + +1.3. 
**Pattern-based tune similarity** + +- The ```/FoNN/demo_notebooks/similarity_search_demo.ipynb``` notebook contains sample similarity searches for a user-selected query tune from the MTC-ANN corpus via FoNN.similarity_search. +- Results are returned which rank other tunes in the corpus by their similarity to the candidate tune. These are obtained via FoNN's three novel metrics: 'motif', 'incipit and cadence', and 'TFIDF'. +1. 'motif': +First a representative pattern is extracted from the query tune via maximal tfidf. Similar patterns to this search term pattern are detected via Levenshtein distance, with only one single edit permitted. The number of similar patterns per tune in the corpus is calculated, normalised by tune length, and returned as a +tune-similarity metric. +2. 'incipit and cadence': +An extended version of a traditional musicological incipit search. +Structurally-important incipit and cadence subsequences are extracted from all tunes in the corpus and +compared via pairwise edit distance against the query tune. Users can select from three available edit distance metrics: +Levenshtein distance; Hamming distance; and a custom-weighted Hamming distance in which musically-consonant +substitutions are penalised less than dissonant substitutions. The edit distance output is taken as a +tune-dissimilarity metric. +3. 'tfidf': +A classical IR baseline methodology: the Cosine similarity between TFIDF vectors of all tunes in the corpus is taken as a tune similarity metric. +- All results are displayed in the notebook and automatically written to csv files at ```./FoNN/[corpus name]/similarity_results```. + +1.4. **Test music datasets** +- [The Meertens Tune Collection Annotated Corpus (MTC-ANN) version 2.0.1](https://www.liederenbank.nl/mtc/): 360 folk song melodies from the Meertens Instituut's Database of Dutch Songs, in **kern and MIDI formats. Stored in ```./FoNN/mtc_ann_corpus``` dir. 
+- [The Session](https://thesession.org): An online, crowd-sourced collection of 40,000+ monophonic Irish traditional dance tunes in ABC Notation and MIDI formats. Stored in ```./FoNN/the_session_corpus``` dir. + +1.5 **Patterns KG data extraction and processing pipeline** +- Step 1: Run ```./FoNN/pattern_knowledge_graph_pipeline/patterns_kg_data_extraction.ipynb``` to extract corpus feature sequence and pattern data. +- Step 2: Run ```./FoNN/pattern_knowledge_graph_pipeline/patterns_kg_data_processing.ipynb``` to combine feature sequence and pattern data for the entire corpus and write to Pickle file for KG creation via Polifonia [Patterns Knowledge Graph (KG)](https://github.com/polifonia-project/patterns-knowledge-graph) repo. +- Further information on user-customization of pipeline parameters is provided within the notebooks. + + +## 2. Ceol Rince na hÉireann (CRÉ) MIDI corpus [legacy component] + +- A new version of the previously-existing *Ceol Rince na hÉireann* corpus, containing 1,195 monophonic Irish traditional dance tunes. The corpus is provided in ABC Notation and in MIDI. Please see: [./cre_corpus/README.md](https://github.com/polifonia-project/folk_ngram_analysis/blob/master/cre_corpus/README.md) for more information. + +* Highlights: + * Corpus title: _Ceol Rince na hÉireann_ + * Source: Black, B 2020, [The Bill Black Irish tune archive homepage](http://www.capeirish.com/webabc), viewed 5 January 2021. + * Contents: 1,195 traditional Irish dance tunes, each of which is represented as a monophonic MIDI file. Also included is ```roots.csv```, a file giving the expert-annotated root note for every file in the corpus as a chromatic integer pitch class. + +## 3. Root Note Detection [legacy component] + + +Work-in-progress on automatic detection of musical root for each tune in the corpus. Please see: [./root_note_detection/README.md](https://github.com/polifonia-project/folk_ngram_analysis/blob/master/root_note_detection/README.md). 
+ This component contains a jupyter notebook script that makes use of ```cre_root_detection.csv```, which is a file containing pitch class values assigned to each piece of music in the corpus by the above-mentioned root-detection metrics outputted by ```setup_corpus.py```. From this input data, the script makes use of machine learning methods to classify the root note. The root note detection notebook can be accessed at [/.root_note_detection/root_note_detection.ipynb](https://github.com/polifonia-project/folk_ngram_analysis/blob/master/root_note_detection/root_note_detection.ipynb). + +## Attribution + +[![DOI](https://zenodo.org/badge/427469033.svg)](https://zenodo.org/badge/latestdoi/427469033) + +If you use the code in this repository, please cite this software as follows: +``` +@software{diamond_fonn_2022, + address = {Galway, Ireland}, + title = {% raw %}{{{% endraw %}FONN} - {FOlk} {N}-gram {aNalysis}}, + shorttitle = {% raw %}{{{% endraw %}FONN}}, + url = {https://github.com/polifonia-project/folk_ngram_analysis}, + publisher = {National University of Ireland, Galway}, + author = {Diamond, Danny and Shahid, Abdul and McDermott, James}, + year = {2022}, +} +``` + +## License +This work is licensed under CC BY 4.0, https://creativecommons.org/licenses/by/4.0/ diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/folk_ngram_analysis/cre_corpus/LICENSE.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/folk_ngram_analysis/cre_corpus/LICENSE.md new file mode 100644 index 00000000..00d2e135 --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/folk_ngram_analysis/cre_corpus/LICENSE.md @@ -0,0 +1,24 @@ +This is free and unencumbered software released into the public domain. + +Anyone is free to copy, modify, publish, use, compile, sell, or +distribute this software, either in source code form or as a compiled +binary, for any purpose, commercial or non-commercial, and by any +means. 
+ +In jurisdictions that recognize copyright laws, the author or authors +of this software dedicate any and all copyright interest in the +software to the public domain. We make this dedication for the benefit +of the public at large and to the detriment of our heirs and +successors. We intend this dedication to be an overt act of +relinquishment in perpetuity of all present and future rights to this +software under copyright law. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR +OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, +ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +OTHER DEALINGS IN THE SOFTWARE. + +For more information, please refer to \ No newline at end of file diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/folk_ngram_analysis/cre_corpus/README.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/folk_ngram_analysis/cre_corpus/README.md new file mode 100644 index 00000000..c0c965fe --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/folk_ngram_analysis/cre_corpus/README.md @@ -0,0 +1,94 @@ +--- +component-id: cre_corpus +name: Ceol Rince na hÉireann MIDI corpus +brief-description: A corpus of 1,195 monophonic instrumental Irish traditional dance tunes. 
+type: Corpus
+release-date: 15/06/2022
+release-number: v0.7.0.1-dev
+work-package:
+- WP3
+licence:
+- CC_BY_v4
+links:
+- https://github.com/polifonia-project/folk_ngram_analysis/corpus
+- http://www.capeirish.com/webabc
+- https://zenodo.org/record/5768216#.YbEAbS2Q3T8
+credits:
+- https://github.com/danDiamo
+- https://github.com/ashahidkhattak
+- http://www.capeirish.com/
+---
+
+
+## About the dataset
+
+**Corpus title:** _Ceol Rince na hÉireann_
+
+**Source:** [Black, B 2020, _The Bill Black Irish tune archive homepage_, viewed 5 January 2021.](http://www.capeirish.com/webabc)
+
+**Contents:** 1,195 traditional Irish dance tunes, represented in [MIDI](https://github.com/polifonia-project/folk_ngram_analysis/tree/master/cre_corpus/MIDI) and [ABC Notation](https://github.com/polifonia-project/folk_ngram_analysis/tree/master/cre_corpus/abc).
+
+Between 1963 and 1999, Irish State publishing companies Oifig an tSoláthair and An Gúm issued five printed volumes of tunes from the collections of Breandán Breathnach (1912-1985) under the series title _Ceol Rince na hÉireann_ (Dance Music of Ireland, hereafter _CRÉ_). The five volumes of _CRÉ_ contain 1,208 traditional tunes, a subset of Breathnach's more extensive personal collection of 5,000+ melodies. The collection has been transcribed into ABC notation by American traditional music researcher Bill Black, and made freely available online via his [personal website](http://www.capeirish.com/webabc). Addition of alternative tune versions and variation in numbering of unique melodies has resulted in a total of 1,224 tunes in the Bill Black ABC corpus. This resource has been used in previous research work, for example it makes up part of a larger aggregated corpus used in the [_Tunepal_](https://tunepal.org/index.html) Music Information Retrieval app. 
We have created a new cleaned and annotated version of the corpus, from which feature sequence data can be extracted and analysed via Polifonia's [FONN](https://github.com/polifonia-project/folk_ngram_analysis) music pattern analysis toolkit.
+
+NOTE: Please see [corpus_demo.ipynb](https://github.com/polifonia-project/folk_ngram_analysis/blob/master/cre_corpus/corpus_demo.ipynb) for a Jupyter notebook exploring the corpus data.
+
+Deliverable 3.3 of the Polifonia project will describe the context and research in more detail. It will be published on [Cordis](https://cordis.europa.eu/project/id/101004746/it).
+
+
+## About corpus pre-processing methodology
+
+Bill Black's ABC version of the _CRÉ_ collection has been manually edited and annotated, and converted to MIDI. This work included:
+* Removal of alternative tune versions, so that the ABC collection more accurately reflects the original print collection.
+* Removal of non-valid ABC notation characters.
+* Editing of repeat markers to ensure accurate MIDI output.
+* Manual assignment of root note (as chromatic pitch class) for every piece of music in the corpus. This data is stored in [roots.csv]( https://github.com/polifonia-project/folk_ngram_analysis/tree/master/cre_corpus/roots.csv), which is used to derive key-invariant secondary feature sequence data from the MIDI files.
+
+
+## Description of the data
+
+```
+corpus/
+    -MIDI/
+        -1,195 monophonic MIDI (.mid) files, one representing each tune.
+    -abc/
+        -1 ABC Notation corpus file (.abc) containing scores for all 1,195 tunes.
+    -roots.csv
+    -README.md
+    -LICENSE.md
+
+```
+
+- ```corpus``` directory contains roots.csv, this README.md, and a LICENSE.md file. 
+ +- Roots.csv holds two columns with one row per each MIDI file in the corpus: + - 'title': MIDI file name (tune title) + - 'root': expert-assigned root note of each melody, represented as a [chromatic pitch class](https://en.wikipedia.org/wiki/Pitch_class) (i.e.: An integer value from C=0 through B=11). + +image +

+
+- To convert corpus from ABC Notation to MIDI format, please download the corpus data and run FONN [abc_ingest.py](https://github.com/polifonia-project/folk_ngram_analysis/blob/master/abc_ingest.py) script. Please see [FONN README.md](https://github.com/polifonia-project/folk_ngram_analysis/blob/master/README.md) for further information.
+
+- To extract feature sequence data from the MIDI corpus, please download the corpus data and run FONN [setup_corpus.py](https://github.com/danDiamo/music_pattern_analysis/blob/master/setup_corpus.py) script. Please see [FONN README.md](https://github.com/polifonia-project/folk_ngram_analysis/blob/master/README.md) for further information.
+
+
+## Attribution
+
+If you use the code in this repository, please cite this software as follows:
+```
+@software{diamond_fonn_2022,
+ address = {Galway, Ireland},
+ title = {% raw %}{{{% endraw %}FONN} - {FOlk} {N}-gram {aNalysis}},
+ shorttitle = {% raw %}{{{% endraw %}FONN}},
+ url = {https://github.com/polifonia-project/folk_ngram_analysis},
+ publisher = {National University of Ireland, Galway},
+ author = {Diamond, Danny and Shahid, Abdul and McDermott, James},
+ year = {2022},
+}
+```
+
+## License
+
+This work is licensed under CC BY 4.0, https://creativecommons.org/licenses/by/4.0/
+
+
diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/folk_ngram_analysis/root_note_detection/README.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/folk_ngram_analysis/root_note_detection/README.md
new file mode 100644
index 00000000..ca8004e4
--- /dev/null
+++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/folk_ngram_analysis/root_note_detection/README.md
@@ -0,0 +1,66 @@
+---
+component-id: root_note_detection
+name: Root Note Detection
+brief-description: Work-in-progress on root note detection on a corpus of monophonic Irish folk tunes. 
+type: Repository
+release-date: 20/05/2022
+release-number: v0.7.0.1-dev
+work-package:
+- WP3
+licence:
+- CC_BY_v4
+links:
+- https://github.com/polifonia-project/folk_ngram_analysis/blob/master/root_note_detection/root_note_detection.ipynb
+- https://zenodo.org/record/5768216#.YbEAbS2Q3T8
+credits:
+- https://github.com/danDiamo
+- https://github.com/ashahidkhattak
+---
+
+
+# Root Note Detection
+
+The files in this folder are related to the Root note detection task. The notebook exploits monophonic Irish folk tunes processed data (that can be found in ```cre_root_detection.csv``` file) and with help of machine learning models predicts the root note of a tune. Determination of the root note of each piece of music in the corpus under investigation is a key foundational step in FONN. Accurate root note data allows reliable calculation of key-invariant chromatic pitch class sequences, which have been the primary input for our pattern analysis and melodic similarity work.
+
+NOTE: Deliverable 3.3 of the Polifonia project describes the context and research in more detail.
+
+To use the best trained model for root-note prediction tasks, follow the **demo notebook** [./RootNoteDemo.ipynb](https://github.com/polifonia-project/folk_ngram_analysis/blob/master/root_note_detection/RootNoteDemo.ipynb).
+
+
+### Prerequisites
+This component requires the ```cre_root_detection.csv```. This file contains the processed data for each tune in the Ceol Rince na hÉireann (CRE) corpus. Please see: [./root_note_detection/cre_root_detection.csv](https://github.com/polifonia-project/folk_ngram_analysis/blob/master/root_note_detection/cre_root_detection.csv)
+
+
+In this deliverable, we employed a factorial design experiment for Decision Tree, Random Forest, and Naive Bayes algorithms. We used a comprehensive list of hyperparameters to select the top-performing models. We also conducted experiments using SMOTE to generate a synthetic balance dataset. 
Finally, evaluation was done on an unseen dataset, and the obtained results are superior to state-of-the-art models. + +Following is the summary of the current work. The **experiment notebook** [./root-note-detection.ipynb](https://github.com/polifonia-project/folk_ngram_analysis/blob/master/root_note_detection/root-note-detection.ipynb) reads the Ceol Rince na hÉireann (CRE) corpus CSV file and then performs the following steps: + +* 1- Exploratory Data Analysis, such as null value, classes count, correlations, etc. +* 2- Global settings are defined to control feature selection +* 3- Multiple dataset are created for model development and its evaluation +* 4- Minority classes are balanced with help of SMOTE +* 5- Classification report of state-of-the-art models for root note detection are generated for comparison +* 6- Factorial design experimental setup is developed to evaluate different classification algorithms such as Decision Tree, RandomForest, NaiveBayes +* 7- The best models are selected, and finally they are compared with SOA models, and the best model is saved. + +The **demo notebook** [./RootNoteDemo.ipynb](https://github.com/polifonia-project/folk_ngram_analysis/blob/master/root_note_detection/RootNoteDemo.ipynb) shows how to use the best trained model for new prediction tasks. 
+
+## Attribution
+
+[![DOI](https://zenodo.org/badge/427469033.svg)](https://zenodo.org/badge/latestdoi/427469033)
+
+If you use the code in this repository, please cite this software as follows:
+```
+@software{danny_diamond_2022_6566379,
+ author = {Danny Diamond and
+ Abdul Shahid and
+ James McDermott},
+ title = {% raw %}{{{% endraw %}polifonia-project/folk\_ngram\_analysis: FONN
+ v0.5dev}},
+ month = may,
+ year = 2022
+}
+```
+
+## License
+This work is licensed under CC BY 4.0, https://creativecommons.org/licenses/by/4.0/
diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/instrument-ontology/README.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/instrument-ontology/README.md
new file mode 100644
index 00000000..7f55b9c2
--- /dev/null
+++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/instrument-ontology/README.md
@@ -0,0 +1,88 @@
+# Music Instrument ontology
+The Music Instrument ontology module represents musical instruments as mediums of performance and their technical properties.
+
+[![DOI](https://zenodo.org/badge/372536364.svg)](https://zenodo.org/badge/latestdoi/372536364)
+[![License: CC BY 4.0](https://img.shields.io/badge/License-CC_BY_4.0-lightgrey.svg)](https://creativecommons.org/licenses/by/4.0/)
+
+> 🔗 Ontology URI: [https://w3id.org/polifonia/ontology/instrument/](https://w3id.org/polifonia/ontology/music-instrument/)
+
+The Instrument Module describes musical instruments as mediums of performance
+and their technical properties. Given that numerous taxonomies of instruments
+into groups and families exist (e.g. Hornbostel-Sachs, MIMO, MusicBrainz) and
+finding common categorisations is an open problem [1], our module provides an
+abstraction capable to express arbitrary classifications. This is achieved by
+leveraging the Information-Realisation and the Collection ODPs. 
Overall, the +module allows to: (i) refer to instruments as entities (an instrumen- tation of +a piece for “piano” and “viola”) as well as conceptually (e.g. a viola has 4 +strings); (ii) support the integration with different taxonomies and +vocabularies, such as [2]; (iii) describe the evolution of instruments in time +and space (e.g. a viola as a cultural heritage object being relocated). +This provides a foundational level where contributors can “plug” their +instrument-specific ontologies [3]. + +![instrument module diagram](diagrams/music-instrument-main-entities.png) + +--- + +## Competency questions addressed +- Which is the physical realization of an instrument? +- Which are the parts of an instrument? +- Who invented an instrument? +- When was an instrument invented? +- Where was an instrument realization built? +- When was an instrument realization built? +- Who built an instrument realization? +- Which is the current location of an instrument realization? +- Which are the locations of an instrument realization during its life cycle? + +## Competency questions planned + + +## Examples of SPARQL queries +- Which is the physical realization of an instrument? +``` +PREFIX inst: +PREFIX core: +SELECT DISTINCT ?instrument ?instrumentRealization +WHERE { ?instrument core:isRealizedBy ?instrumentRealization . +} +``` + +- Who invented an instrument? +``` +PREFIX inst: +SELECT DISTINCT ?instrument ?builder +WHERE { ?instrument mi:wasInventedBy ?builder . 
+} +``` + +## Related ontologies + +### External imports +- [ArCo Location Ontology module](https://w3id.org/arco/ontology/location) + +### Aligned ontologies + +## Statistics +Considering that this module imports other modules of the network and the ArCo ontology, relevant statistics are: +- number of classes: 161 +- number of object properties: 209 +- number of datatype properties: 41 +- number of logical axioms: 876 + +## Datasets +TODO + +## License + +This work is licensed under a +[Creative Commons Attribution 4.0 International License](http://creativecommons.org/licenses/by/4.0/). + + +## References + +[1] Kolozali, S., Barthet, M., Fazekas, G., Sandler, M.B.: Knowledge representation issues in musical instrument ontology design. In: ISMIR. pp. 465–470 (2011) + +[2] Lisena, P., Todorov, K., Cecconi, C., Leresche, F., Canno, I., Puyrenier, F., Voisin, M., Le Meur, T., Troncy, R.: Controlled vocabularies for music metadata. In: IS- MIR: International Society for Music Information Retrieval (2018) + +[3] Zanoni, M., Setragno, F., Sarti, A., et al.: The violin ontology. In: Proc. of the 9th Conference on Interdisciplinary Musicology (CIM14). Citeseer (2014) \ No newline at end of file diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/instrument-ontology/header.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/instrument-ontology/header.md new file mode 100644 index 00000000..7285c465 --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/instrument-ontology/header.md @@ -0,0 +1,62 @@ +--- +component-id: https://w3id.org/polifonia/ontology/instrument/ +type: Ontology +name: Music Instrument Ontology +description: An ontology to describe instruments as mediums of performance and their technical properties. 
+image: diagrams/music-instrument-main-entities.png
+work-package:
+- WP2
+pilot:
+- INTERLINK
+- ORGANS
+- BELLS
+project: polifonia-project
+resource: ontology/music-instrument.owl
+release-date: 13/04/2023
+release-number: v1.0
+release-link: https://github.com/polifonia-project/ontology-network/releases
+doi: 10.5281/zenodo.7919970
+changelog: https://github.com/polifonia-project/ontology-network/releases
+licence:
+- CC-BY_v4
+copyright: "Copyright (c) 2023 Music Instrument Contributors"
+contributors: # replace these with the GitHub URL of each contributor
+- Jacopo de Berardinis
+- Valentina Anita Carriero
+- Fiorela Ciroku
+related-components:
+- informed-by:
+  - polifoniacq-dataset
+- reuses: # any reused/imported ontology
+  - https://w3id.org/polifonia/ontology/core/
+- story: # any related story this ontology addresses
+  - Paul#1_OrganComparison
+  - Paul#2_ResourceReliability
+  - Frank#1_OrganKnowledge
+  - Amy#1_OrganTrends
+  - Amy#2_OrganBuilders
+- persona: # any persona this ontology addresses
+  - Paul
+  - Amy
+  - Frank
+---
+
+
+# Music Instrument Ontology
+
+The Instrument Module describes musical instruments as mediums of performance
+and their technical properties. Given that numerous taxonomies of instruments
+into groups and families exist (e.g. Hornbostel-Sachs, MIMO, MusicBrainz) and
+finding common categorisations is an open problem, our module provides an
+abstraction capable to express arbitrary classifications. This is achieved by
+leveraging the Information-Realisation and the Collection ODPs. Overall, the
+module allows to: (i) refer to instruments as entities (an instrumentation of
+a piece for “piano” and “viola”) as well as conceptually (e.g. a viola has 4
+strings); (ii) support the integration with different taxonomies and
+vocabularies; (iii) describe the evolution of instruments in time
+and space (e.g. a viola as a cultural heritage object being relocated). 
+This provides a foundational level where contributors can “plug” their +instrument-specific ontologies. + +[Link to the website](https://github.com/polifonia-project/music-instrument-ontology) diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/lharp/README.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/lharp/README.md new file mode 100644 index 00000000..f09d7de6 --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/lharp/README.md @@ -0,0 +1,109 @@ +--- +component-id: LHARP +name: Local Harmonic Agreement based on Recurrent Patterns +brief-description: A method for computing harmonic similarity from symbolic sequences, based on shared recurring harmonic structures. +type: Repository +release-date: 15-10-2021 +release-number: v0.1 +work-package: WP2, WP3 +pilot: INTERLINK +keywords: + - patterns + - chords +changelog: n/a. +licence: +- CC-BY-NC_v4 +release link: n/a. +image: n/a. +logo: n/a. +demo: https://polifonia-project.github.io/musilar-preview/ +links: https://polifonia-project.github.io/musilar-preview/ +running-instance: n/a +credits: J. de Berardinis (KCL), A. Poltronieri (UniBo) +related-components: + - dataset + - Isophonics + - Schubert-Winterreise dataset + - JAAH +bibliography: A simple local harmonic similarity function based on shared repeated chord structures (manuscript in progress) +--- + +# Local Harmonic Agreement based on Recurrent Patterns (LHARP) + +A novel method for harmonic similarity that emphasises shared repeated patterns among symbolic chord sequences, to accommodate a wide set of applications. Compared to other harmonic similarity methods on symbolic music, LHARP enables more explorative studies, as it can establish links when local harmonic patterns are found repeated in both sequences, while retaining global information to a lesser extent. 
This is also complemented with *"The Harmonic Network"* a computational tool allowing users to explore music collections by visualising harmonic similarities among tracks and interacting with the resulting graph to discover nontrivial relationships among authors, composers, and pieces. + +

+ +

+ + + +## Introduction to LHARP + +The proposed harmonic similarity in this chapter is formulated in a way to emphasise shared repeated patterns among two arbitrary symbolic sequences, hence it provides a general framework for the analysis of symbolic streams based on their local structures. +As illustrated in the diagram above, the domain-specific parts are the pre-processing and encoding steps, whereas the downstream part of the pipeline -- including pattern extraction and matching operations, can generalise to arbitrary symbolic (uni-modal) sequences. + + +1. **Harmonic reduction.** Chord progression are simplified before comparison – the bass note is removed as this operation improves the generalisation capabilities of the next steps, thereby producing more consistent similarity scores), and consecutively repeated chords are removed. This provides a "bird’s eye view" on the global harmonic properties of each piece. + +2. **Key-based normalisation.** Chords labels/classes in a progression need to be contextualised according to the key of the piece (defined by the tonic and the scale) before any comparison is possible. Therefore, all chord progressions are transposed to the same key: C major. This last transformation concludes the pre-processing operations. + +3. **Decomposition of chords.** The normalised harmonic sequences are then prepared for the encoding step, so that they can be used as input to the any computational procedure. Rather than further simplifying the symbolic musical content, a new encoding procedure was designed to retain the fundamental internal structure of each chord. More precisely, every chord label is decomposed into its pitch constituents – the individual pitches it is made of. For example, a C major is encoded as a multi-hot vector where the elements corresponding to the active pitches are equal to 1; all the others are 0. + +4. 
**Enumeration of pitch simultaneities.** To reduce the complexity of any potential polyphonic model using such sparse local representations of chords, each unique decomposition is then assigned to an index (an integer value). As it can be observed, this is akin to the common encoding approach used in natural language processing for word tokens. This results into *chord tokens* defined over the vocabulary of all possible chord decompositions. + +5. **Harmonic thumbnailing.** To identify the areas/regions of chord progressions that can be deemed as "harmonically memorable", we extract the n-grams of all possible orders -- starting from tri-grams, that repeat at least once within the progression. We call them "harmonic thumbnails", as they represent harmonic structures per se. + +6. **Shared harmonic patterns.** Finally, chord progressions are compared for similarity based on the agreement between their harmonic thumbnails. In particular, the longest harmonic structures they share is compared to the order of the longest thumbnail that occurs within each progression, independently. Therefore, depending on the harmonic patterns the two chord progressions have in common -- in relation to their internal structures, the similarity function will return a value between 0 and 1 (the higher the value, the stronger the similarity) together with the longest harmonic patterns they share. + +## The Harmonic Network + +

+ +

+ +As illustrated in the figure above, in the harmonic network nodes correspond to tracks in chord music datasets, whereas edges connect nodes if their value of harmonic similarity is greater than 0 (an harmonic match was found). +To simplify the inspection, a grey-scale colourmap visually projects the value of harmonic similarity expressed by edges: from light grey (low similarity) to plain black (high similarity). +Instead, nodes are sized according to their degree -- the number of connections/edges they have. As the graph is undirected, considering that similarity is symmetric by definition, the degree is equal to the number of tracks sharing harmonic structures. +A distinct border colour also differentiate nodes according to the music dataset they come from. +The harmonic network also provides features for the exploration of the graph; a demo LIVE version can be found [at this link](https://polifonia-project.github.io/musilar-preview/). + +# Setup, data, and examples + +All required packages and dependencies are listed in the ``requirements.txt`` file. To setup your environment, just create a new virtualenv or conda environment from your local system, clone this repository, and install all dependencies by running the following command. +``` +git clone https://github.com/polifonia-project/lharp.git && cd lharp +pip install -r ./requirements.txt +``` +## Getting chord datasets +To replicate our experiments, or simply to compare the chord progressions in the current version of the harmonic network with those of your own collection, you first need to clone our dataset repository (run the command below). +``` +cd .. && git clone https://github.com/polifonia-project/datasets.git +``` +The dataset used for this project were: +* Isophonics (pop/rock songs from Beatles and Queen) +* JAAH (jazz songs from various artists) +* Schubert Winterreise (classical tracks from Schubert). + +## How to use LHARP +You are now ready to use LHARP. 
Please check our [demo notebook](https://github.com/polifonia-project/lharp/blob/6f1f6e21d900cca549c82d637fa4317e8633bea1/demo_example.ipynb) for an example run. We have also implemented a documented API where each function corresponds to one of the main steps for the computation of the harmonic similarity (see the diagram above). These functions can be found [at this link](https://github.com/polifonia-project/lharp/blob/0f87f95fa04f94537b23be3b5bfeacb24cc63cd2/src/lharp_api.py) and they also allow to compute LHARP among tracks in a given collection (``harmonic_similarity_intra``) and against those tracks we used for our experiments (``harmonic_similarity_inter``). + + +# Attribution + +[![DOI](https://zenodo.org/badge/436261336.svg)](https://zenodo.org/badge/latestdoi/436261336) + +Please, cite this paper if you use the code in this repository as part of a published research project: + +``` +inproceedings{deberardinis2021lharp, + title={A local harmonic similarity function based on shared repeated chord structures}, + author={de Berardinis, Jacopo and Meroño-Peñuela, Albert and Poltronieri, Andrea and Presutti, Valentina}, + booktitle={Manuscript under review}, + year={2021} +} +``` + +# License + +Creative Commons License
This work is licensed under a Creative Commons Attribution-NonCommercial 4.0 International License. diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/lharp/setup/README.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/lharp/setup/README.md new file mode 100644 index 00000000..a9299036 --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/lharp/setup/README.md @@ -0,0 +1 @@ +In this folder we will move all the (intermediate) data structures used to preprocess, encode, ngram-ify and compare chord sequences for harmonic similarity. The code should be adapted accordingly. \ No newline at end of file diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/lharp/similarities/README.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/lharp/similarities/README.md new file mode 100644 index 00000000..599299db --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/lharp/similarities/README.md @@ -0,0 +1,28 @@ +# Similarities Pairs + +In the folder [split_global](https://github.com/polifonia-project/harmonic-similarity/tree/main/similarities/split_global) you can find more than 400 files, each of which containing 15 similarity pairs. + +The files are all ```.xlsx``` and their structure is the following: + +![Screenshot 2021-10-07 at 20 03 14](https://user-images.githubusercontent.com/44606182/136439284-73e9b792-4d41-4dbb-ad1d-00c154514c78.png) + +* ```track_1``` and ```track_2``` columns: contain the id of the two tracks among which the similarity was calculated; + +* ```pattern_track_1``` and ```pattern_track_2``` columns: contain the longest harmonic sequence that the two tracks have in common. 
If there is more than one longest sequence in common, they are represented in different comma separated lists;
+
+* ```time_pattern_track_1``` and ```time_pattern_track_2``` columns: contain the time information about the longest harmonic sequence that the two tracks have in common. For each common sequence the time information is described as a tuple composed as follows ```(start_time, end_time)```. Both the time stamps are formatted as ```h:mm:ss:mmmmmmm```;
+
+* ```sonification_pattern_track_1``` and ```sonification_pattern_track_2``` columns: contain the link to the audio file (hosted on [SoundCloud](https://soundcloud.com/)) of the track's chords sonification. The links lead to the exact timestamp where the different patterns start within the audio track;
+
+* ```spotify_uri_track_1``` and ```spotify_uri_track_2``` columns: contain URI to the respective tracks on the [Spotify](https://spotify.com) streaming platform. To listen to the track, simply copy and paste the uri into your browser address bar. To listen to the common patterns, you must manually position yourself at the timestamp indicated in the ``time_pattern_track`` column. Tracks that do not have a Spotify URI can be searched for on any music streaming service: however, in this case, the accuracy of the timestamps provided is NOT guaranteed.
+
+
+## Criteria for populating the split files
+
+The files contained in the [split_global](https://github.com/polifonia-project/harmonic-similarity/tree/main/similarities/split_global) folder were populated according to the following criteria:
+
+* similarities are not calculated between tracks with the same name;
+* similarities are not calculated between tracks of the same author;
+* there are no inverse similarities within the same file (if similarity is calculated between x and y it will not be calculated between y and x);
+* the similarity values have been hidden to avoid biased ratings. 
+ diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/lharp/sonification/full-tracks/audio/README.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/lharp/sonification/full-tracks/audio/README.md new file mode 100644 index 00000000..67895bb5 --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/lharp/sonification/full-tracks/audio/README.md @@ -0,0 +1,5 @@ +# Instructions + +To get the audio files of the sonifications, just download the folder at the link below and extract all files in this directory. The total size should approximately 4GB on your local file system. + +https://drive.google.com/drive/folders/1aBcDyHIpn2nZZQREeVaeA2mSUAE2XdQt?usp=sharing \ No newline at end of file diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/lharp/sonification/soundfonts/README.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/lharp/sonification/soundfonts/README.md new file mode 100644 index 00000000..f8d6ed44 --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/lharp/sonification/soundfonts/README.md @@ -0,0 +1 @@ +This directory should contain any soundfont used for the sonification of MIDIs representing chord progressions. \ No newline at end of file diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/licences/KG.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/licences/KG.md new file mode 100644 index 00000000..2f1cb1c7 --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/licences/KG.md @@ -0,0 +1,32 @@ +--- +component-id: licences +name: Licences Knowlegde Graph +description: The Polifonia Licences KG, containing licence information of the resources from third-parties that the project reused. 
+type: KnowledgeGraph +release-date: 28/04/2023 +release-number: v0.3 +work-package: +- WP1 +- WP2 +licence: +- CC-BY_v4 +doi: 10.5281/zenodo.7875034 +links: +credits: +- "Enrico Daga " +- "Jason Carvalho " +- "Marco Gurrieri" +related-components: +- generated-by: + - licences-pipeline +- reuses: + - Dalicc, https://www.dalicc.net/ +--- +# Licences Knowlegde Graph + +The Polifonia Licences KG, containing licence information of the resources from third-parties that the project reused. +The Licences Knowledge Graph is generated by this [KG construction pipeline](Pipeline.md). + +The dataset can be found in the folder [knowledgegraph](knowledgegraph). + + diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/licences/Pipeline.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/licences/Pipeline.md new file mode 100644 index 00000000..d4b2bc1b --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/licences/Pipeline.md @@ -0,0 +1,62 @@ +--- +component-id: licences-pipeline +name: Licences KG generation pipeline +description: Resources for the Polifonia Licences KG, containing licence information of the resources from third-parties that the project reused. +type: Software +release-date: 28/04/2023 +release-number: v0.3 +work-package: +- WP2 +licence: +- "CC-BY_v4" +doi: 10.5281/zenodo.7875034 +links: +credits: +- "Enrico Daga " +- "Jason Carvalho " +- "Marco Gurrieri" +related-components: +- reuses: + - sparql-anything-cli + - Dalicc, https://www.dalicc.net/ +--- +# Licences KG generation pipeline + +This project includes resources for the Polifonia Licences KG, containing licence information of the resources from third-parties that the project reused. + +In what follows, fx refers to the following command line `java -jar sparql-anything--.jar`. + +## Download licence descriptions from Dalicc +We reuse a catalogue of machine readable licences from the [Dalicc project](https://www.dalicc.net/). 
+
+```
+fx -q queries/harvest-dalicc.sparql -f TTL -o knowledgegraph/dalicc.ttl
+```
+
+## Knowledge Graph Construction
+
+### Generate `datasets-licences.ttl`
+From the spreadsheet in `data/` to the RDF file.
+
+```
+fx -q queries/kg.sparql -f TTL -o knowledgegraph/datasets-licences.ttl
+```
+
+### Views
+
+```
+fx -q queries/terms-view.sparql -l knowledgegraph/
+```
+
+### Stats
+
+```
+fx -q queries/datasets-by-licence.sparql -l knowledgegraph/
+
+fx -q queries/terms-stats.sparql -l knowledgegraph/
+```
+
+
+
+
+
diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/licences/README.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/licences/README.md
new file mode 100644
index 00000000..61ce3e40
--- /dev/null
+++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/licences/README.md
@@ -0,0 +1,8 @@
+# Licences Knowledge Graph
+
+This project includes resources for the Polifonia Licences KG, containing licence information of the resources from third-parties that the project reused.
+
+This repository includes both the [generation pipeline](Pipeline.md) and the resulting [Knowledge Graph](KG.md).
+
+
+
diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/meetups-application/README.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/meetups-application/README.md
new file mode 100644
index 00000000..fc5b5ab3
--- /dev/null
+++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/meetups-application/README.md
@@ -0,0 +1,3 @@
+# MEETUPS Pilot - Demo web application
+
+This repository contains the MEETUPS web application and corresponding Docker container for running the application in a webserver.
diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/meetups-application/meetups-application.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/meetups-application/meetups-application.md
new file mode 100644
index 00000000..9b446159
--- /dev/null
+++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/meetups-application/meetups-application.md
@@ -0,0 +1,35 @@
+---
+component-id: meetups-application
+type: WebApplication
+name: MEETUPS web application
+description: The MEETUPS pilot application.
+image:
+logo:
+demo:
+work-package:
+  - WP5
+pilot: MEETUPS
+project: polifonia-project
+resource: https://polifonia.kmi.open.ac.uk/MEETUPS/
+release-date: 25/05/2023
+release-number: v0.3.6
+release-link: https://github.com/polifonia-project/meetups-application/releases
+doi:
+changelog: https://github.com/polifonia-project/meetups-application/releases
+license:
+  - Apache-2.0
+copyright: "Copyright (c) 2023 The Open University"
+contributors:
+  - Jason Carvalho
+  - Alba Morales-Tirado
+  - Enrico Daga
+related-components:
+  - reuses:
+    - meetups-knowledge-graph
+  - informed-by:
+    - meetups-ui-design
+  - persona:
+    - Ortenz
+    - David
+    - Sophie
+---
diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/meetups-application/website/README.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/meetups-application/website/README.md
new file mode 100644
index 00000000..7ef98ba6
--- /dev/null
+++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/meetups-application/website/README.md
@@ -0,0 +1,65 @@
+# [Start Bootstrap - SB Admin 2](https://startbootstrap.com/theme/sb-admin-2/)
+
+[SB Admin 2](https://startbootstrap.com/theme/sb-admin-2/) is an open source admin dashboard theme for [Bootstrap](https://getbootstrap.com/) created by [Start Bootstrap](https://startbootstrap.com/).
+ +For the legacy Bootstrap 3 version of this theme, you can view the [last stable release](https://github.com/StartBootstrap/startbootstrap-sb-admin-2/releases/tag/v3.3.7%2B1) of SB Admin 2 for Bootstrap 3. + +## Preview + +[![SB Admin 2 Preview](https://assets.startbootstrap.com/img/screenshots/themes/sb-admin-2.png)](https://startbootstrap.github.io/startbootstrap-sb-admin-2/) + +**[Launch Live Preview](https://startbootstrap.github.io/startbootstrap-sb-admin-2/)** + +## Status + +[![GitHub license](https://img.shields.io/badge/license-MIT-blue.svg)](https://raw.githubusercontent.com/StartBootstrap/startbootstrap-sb-admin-2/master/LICENSE) +[![npm version](https://img.shields.io/npm/v/startbootstrap-sb-admin-2.svg)](https://www.npmjs.com/package/startbootstrap-sb-admin-2) +[![Build Status](https://travis-ci.org/StartBootstrap/startbootstrap-sb-admin-2.svg?branch=master)](https://travis-ci.org/StartBootstrap/startbootstrap-sb-admin-2) +[![dependencies Status](https://david-dm.org/StartBootstrap/startbootstrap-sb-admin-2/status.svg)](https://david-dm.org/StartBootstrap/startbootstrap-sb-admin-2) +[![devDependencies Status](https://david-dm.org/StartBootstrap/startbootstrap-sb-admin-2/dev-status.svg)](https://david-dm.org/StartBootstrap/startbootstrap-sb-admin-2?type=dev) + +## Download and Installation + +To begin using this template, choose one of the following options to get started: + +* [Download the latest release on Start Bootstrap](https://startbootstrap.com/theme/sb-admin-2/) +* Install via npm: `npm i startbootstrap-sb-admin-2` +* Clone the repo: `git clone https://github.com/StartBootstrap/startbootstrap-sb-admin-2.git` +* [Fork, Clone, or Download on GitHub](https://github.com/StartBootstrap/startbootstrap-sb-admin-2) + +## Usage + +After installation, run `npm install` and then run `npm start` which will open up a preview of the template in your default browser, watch for changes to core template files, and live reload the browser when changes are 
saved. You can view the `gulpfile.js` to see which tasks are included with the dev environment. + +### Gulp Tasks + +* `gulp` the default task that builds everything +* `gulp watch` browserSync opens the project in your default browser and live reloads when changes are made +* `gulp css` compiles SCSS files into CSS and minifies the compiled CSS +* `gulp js` minifies the themes JS file +* `gulp vendor` copies dependencies from node_modules to the vendor directory + +You must have npm installed globally in order to use this build environment. This theme was built using node v11.6.0 and the Gulp CLI v2.0.1. If Gulp is not running properly after running `npm install`, you may need to update node and/or the Gulp CLI locally. + +## Bugs and Issues + +Have a bug or an issue with this template? [Open a new issue](https://github.com/StartBootstrap/startbootstrap-sb-admin-2/issues) here on GitHub or leave a comment on the [template overview page at Start Bootstrap](https://startbootstrap.com/theme/sb-admin-2/). + +## About + +Start Bootstrap is an open source library of free Bootstrap templates and themes. All of the free templates and themes on Start Bootstrap are released under the MIT license, which means you can use them for any purpose, even for commercial projects. + +* +* + +Start Bootstrap was created by and is maintained by **[David Miller](https://davidmiller.io/)**. + +* +* +* + +Start Bootstrap is based on the [Bootstrap](https://getbootstrap.com/) framework created by [Mark Otto](https://twitter.com/mdo) and [Jacob Thorton](https://twitter.com/fat). + +## Copyright and License + +Copyright 2013-2021 Start Bootstrap LLC. Code released under the [MIT](https://github.com/StartBootstrap/startbootstrap-resume/blob/master/LICENSE) license. 
diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/meetups-knowledge-graph/README.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/meetups-knowledge-graph/README.md new file mode 100755 index 00000000..ee132371 --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/meetups-knowledge-graph/README.md @@ -0,0 +1,105 @@ +--- +component-id: meetups-knowledge-graph +type: KnowledgeGraph +name: MEETUPS Knowledge Graph +description: MEETUPS Knowledge Graph module with data on historical meetups and related to MEETUPS Pilot +work-package: +- WP4 +pilot: +- MEETUPS +project: polifonia-project +resource: https://github.com/polifonia-project/meetups-knowledge-graph/ +release-date: 20/07/2022 +release-number: v0.1 +release link: https://github.com/polifonia-project/meetups-knowledge-graph/releases/tag/v0.1 +doi: https://zenodo.org/badge/latestdoi/588597123 +changelog: https://github.com/polifonia-project/meetups-knowledge-graph/releases/tag/v0.1 +licence: +- CC-BY_v4 +copyright: "Copyright (c) 2023 MEETUPS @ The Open University" +contributors: +- Alba Morales Tirado +- Enrico Daga +- Jason Carvalho +credits: +- https://github.com/albamoralest +- https://github.com/enridaga +related components: +- informed-by: + - meetups-ontology + - meetups-corpus-collection +- persona: + - Ortenz + - David + - Sophie +- reuses: + - sparql-anything-java + - sparql-anything-cli +--- + +# MEETUPS Knowledge Graph + +[![DOI](https://zenodo.org/badge/588597123.svg)](https://zenodo.org/badge/latestdoi/588597123) + +The MEETUPS knowledge graph contains data about historical encounters of people in the musical world in Europe from c. 1800 to c. 1945. + + +## Knowledge Graph description + +All the data is extracted from artists' biographies, mainly from open-access digital sources such as Wikipedia artists' web pages. +A total of 33,309 biographies were collected for knowledge extraction and construction of the KG. 
+Currently, the KG contains data on the data extraction of 1000 biographies. In the next deliverable, the KG will include data on the total number of biographies collected.
+
+## Competency questions related to MEETUPS knowledge graph
+Ortenz
+- What places did musician Z visited in her career?
+- Where did she perform?
+- Where did she live?
+- Did musician X and performer Y ever meet? Where, when, and why?
+- In what context the meeting happened?
+- What is the nature of the event?
+- Was it a celebration, a festival, a private event?
+- Was it a religious or a secular event?
+- Who paid to support the event?
+- What is the provenance of the event attendees? What and how they happened to be there?
+- Did they travel to reach the place?
+- Were they invited? Was the meeting accidental?
+- How can we characterize the relation among the participants?
+- Was there a power relation? (e.g., Patreon / Musician)
+
+## Statistics:
+```
+$ fx -q queries/statistics.sparql -l data/meetups/
+-------------------------------
+| key                 | value |
+===============================
+| "Meetups"           | 74445 |
+| "Persons mentioned" | 51425 |
+| "Subjects"          | 1002  |
+| "Places mentions"   | 5595  |
+| "Time expressions"  | 79838 |
+-------------------------------
+```
+
+## MELODY data stories
+
+To be released in next deliverable
+
+## Additional information
+### Queries and usage
+
+Generate list of biographies and related files.
+```
+fx -q queries/list-sample.sparql -o data/biographies.csv -f CSV
+```
+Generate sentences KG data
+```
+fx -q queries/sentences.sparql -i data/biographies.csv -p "data/sentences/?fileId.ttl" -f TTL
+```
+
+
+[part above to be updated...]
+ +``` +fx -q queries/sentences.sparql -v fileId=10085 -v subject=http://dbpedia.org/resource/Edward_Elgar +``` diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/meetups-ontology/README.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/meetups-ontology/README.md new file mode 100755 index 00000000..8213eebb --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/meetups-ontology/README.md @@ -0,0 +1,112 @@ +--- +component-id: meetups-ontology +type: Ontology +name: MEETUPS Ontology +description: "Ontology that represents concepts and relationships describing encounters between people in the musical world in Europe from c. 1800 to c. 1945." +work-package: +- WP4 +pilot: +- MEETUPS +project: polifonia-project +resource: https://github.com/polifonia-project/meetups-ontology +release-date: 2023/01/13 +release-number: v0.1 +release-link: https://github.com/polifonia-project/meetups-ontology/releases/tag/v0.1 +doi: https://zenodo.org/badge/latestdoi/588540533 +changelog: https://github.com/polifonia-project/meetups-ontology/releases/tag/v0.1 +licence: Apache-2.0 +copyright: "Copyright (c) 2023 MEETUPS @ The Open University" +contributors: +- Alba Morales Tirado +- Enrico Daga +related-components: +- informed-by: + - meetups-pilot + - meetups-knowledge-graph +- persona: + - Ortenz + - David + - Sophie +--- + +# MEETUPS Ontology + +[![DOI](https://zenodo.org/badge/588540533.svg)](https://zenodo.org/badge/latestdoi/588540533) +[![License: Apache 2.0]](http://www.apache.org/licenses/LICENSE-2.0) + +Ontology URI: [https://w3id.org/polifonia/ontology/meetups-ontology#/](https://w3id.org/polifonia/ontology/meetups-ontology#/) + +The ontology module MEETUPS, which is part of the Polifonia Ontology Network, represents concepts and relationships describing encounters between people in the musical world in Europe from c. 1800 to c. 1945. 
+ +## Ontology description + +Typically, historical meetups, which are the main subject of this module, are described by means of four main components: (i) the people involved in the meetup, for instance, the person that is the subject of interest and the people interacting in the event, (ii) the place where the encounter took place (e.g., city, country, venue), the type of event, the reason (e.g., music making, personal life, business, among others) and the date when it took place. + +This ontology module is strictly related to the Polifonia pilot MEETUPS: ![MEETUPS Pilot](https://github.com/polifonia-project/meetups_pilot) + +Ontology graphic description: +![MEETUPS ontology module](diagrams/meetups-ont-diagram-V0.2.png?raw=true "MEETUPS ontology module") + + +Ontology provenance information: +![MEETUPS ontology module](diagrams/meetups-ont-diagram-V2_prov.png?raw=true "MEETUPS provenance") + + + +## Competency questions related to MEETUPS ontology module +Ortenz +- What places did musician Z visited in her career? +- Where did she perform? +- Where did she live? +- Did musician X and performer Y ever meet? Where, when, and why? +- In what context the meeting happened? +- What is the nature of the event? +- Was it a celebration, a festival, a private event? +- Was it a religious or a secular event? +- Who paid to support the event? +- What is the provenance of the event attendees? What and how they happened to be there? +- Did they travel to reach the place? +- Were they invited? Was the meeting accidental? +- How can we characterize the relation among the participants? +- Was there a power relation? (e.g., Patreon / Musician) + +David +- Where were the places (in which they played)? +- Where were the musicians coming from? + +Sophie +- What is the time relationship between different musicians, e.g., who was working at the same time? +- What was the composer’s network (patrons, institutions …)? 
+
+
+These Competency Questions were selected from the following stories
+- [Ortenz](https://github.com/polifonia-project/stories/blob/main/Ortenz:%20Music%20Historian/Ortenz%232_MusicalSocialNetwork.md)
+- [David](https://github.com/polifonia-project/stories/blob/main/David:%20Music%20Historian/David%231_MusicHistorian.md)
+- [Sophie](https://github.com/polifonia-project/stories/blob/main/Sophia:%20Musicologist/Sophia%231_MusiciansAndTheirEnvironment.md)
+
+## Examples of SPARQL queries
+
+Refer to queries folder
+
+## Statistics
+
+Considering the MEETUPS ontology is still in development, we report the following useful statistics:
+- number of classes: 17
+- number of object properties: 5
+- number of datatype properties: 0
+- number of logical axioms: 27
+
+## Datasets
+
+MEETUPS Pilot dataset: dataset representing a collection of ~33k biographies collected from Wikipedia pages,
+
+- https://github.com/polifonia-project/meetups_corpus_collection
+
+## Alignment to other ontologies
+
+MEETUPS pilot is part of the European project Polifonia and it is aligned with the Polifonia Ontology Network (PON)
+- [Core](https://w3id.org/polifonia/ontology/core)
+
+MEETUPS also reuses ontologies such as Time and ProvOnto
+- [Time](http://www.w3.org/2006/time)
+- [PROV-Onto](http://w3.org/ns/prov#)
\ No newline at end of file
diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/meetups-ui-design/README.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/meetups-ui-design/README.md
new file mode 100644
index 00000000..db0f283c
--- /dev/null
+++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/meetups-ui-design/README.md
@@ -0,0 +1,4 @@
+# MEETUPS user interface design
+Design mockups for the MEETUPS pilot user interface components.
+
+Designs are provided as both Balsamic Wireframes documents and also a PDF.
diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/meetups-ui-design/meetups-ui-design.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/meetups-ui-design/meetups-ui-design.md
new file mode 100644
index 00000000..ab91a346
--- /dev/null
+++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/meetups-ui-design/meetups-ui-design.md
@@ -0,0 +1,41 @@
+---
+component-id: meetups-ui-design
+type: RequirementsCollection
+name: MEETUPS UI Designs
+description: A collection of initial user interface mockups, used as part of the initial application requirements for building the MEETUPS pilot application interface
+image:
+logo:
+demo:
+work-package:
+  - WP5
+pilot: MEETUPS
+project: polifonia-project
+resource: https://github.com/polifonia-project/meetups-ui-design/blob/main/meetups-ui-initial-designs.pdf
+release-date: 17/04/2023
+release-number: v0.1.0
+release-link: https://github.com/polifonia-project/meetups-ui-design/releases/tag/v0.1.0
+doi:
+changelog: https://github.com/polifonia-project/meetups-ui-design/releases/tag/v0.1.0
+license:
+  - Apache-2.0
+copyright: "Copyright (c) 2023 The Open University"
+contributors:
+  - Jason Carvalho
+  - Alba Morales-Tirado
+  - Enrico Daga
+related-components:
+  - informed-by:
+    - Ortenz
+---
+# MEETUPS UI Designs
+A collection of initial user interface mockups, used as part of the initial application requirements for building the MEETUPS pilot application interface.
+
+Three images follow; for the whole content please see [this PDF](meetups-ui-initial-designs.pdf).
+
+![image](png/meetups-ui-initial-designs-1.png)
+
+
+![image](png/meetups-ui-initial-designs-2.png)
+
+
+![image](png/meetups-ui-initial-designs-3.png)
\ No newline at end of file
diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/meetups_corpus_collection/README.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/meetups_corpus_collection/README.md
new file mode 100644
index 00000000..35927d2f
--- /dev/null
+++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/meetups_corpus_collection/README.md
@@ -0,0 +1,75 @@
+---
+component-id: meetups-corpus-collection
+name: MEETUPS Corpus collection
+description: This is a tool built to download the Wikipedia pages of people in the music scene in Europe
+type: Software
+release-date: 20/07/2022
+release-number: v1.0
+work-package:
+- WP4
+pilot:
+- MEETUPS
+keywords:
+- Wikipedia
+- Music
+licence:
+- Apache-2.0
+release link:
+  - https://github.com/polifonia-project/meetups_corpus_collection/releases/tag/v1.0
+credits:
+  - https://github.com/albamoralest
+---
+
+# MEETUPS Corpus collection
+
+[![DOI](https://zenodo.org/badge/504547694.svg)](https://zenodo.org/badge/latestdoi/504547694)
+
+### Collecting Wikipedia pages of people in the music scene in Europe
+
+
+MEETUPS Corpus collection is a tool developed in Python and PyCharm IDE. It collects Wikipedia web pages (in txt format) of music authors in Europe.
+ +- Uses the "wikipedia" library to download only wikipedia webpage text +- Process the list of files in chunks of 100 units +- The process can start and stop any time as it controls the last downloaded item + + +#### Information on installation and setup + +- Pre-requirements: + - A CSV file with the list of authors' wikipedia id and store in sparqlQueryResults/ directory + - Python 3.9 +- Install wikipedia library: + - pip install wikipedia +- To execute: + - Download project and execute __init__.py file + +#### Details of dataset + +SPARQL queries to retrieve authors' names and dbo:wikiPageID information using Dbpedia SPARQL Endpoint https://dbpedia.org/sparql + +Query filters: + + Categories: + _sparql.csv + +Dataset: + + Location: + dataset/ + Format: + Text files .txt + Name convention: + .txt + Total biographies collected: + 33,309 authors wikipedia webpage + Summary total biographies collected: + sparqlQueryResults/TOTAL_download_biography.csv + Meetups pilot sample: 1.002 + +Select random biographies -> sampleBiographies.py + diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/meetups_pilot/README.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/meetups_pilot/README.md new file mode 100644 index 00000000..050862d9 --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/meetups_pilot/README.md @@ -0,0 +1,54 @@ +--- +container-id: meetups-pilot +name: Musical MEETUPS +type: Project +description: MEETUPS Pilot container with all the elements that support the knowledge extraction of historical meetups +work-package: +- WP4 +pilot: +- MEETUPS +project: polifonia-project +bibliography: +- main-publication: "Morales Tirado, Alba; Carvalho, Jason; Mulholland, Paul and Daga, Enrico (2023). Musical Meetups: a Knowledge Graph approach for Historical Social Network Analysis. In: Proceedings of the ESWC 2023 Workshops and Tutorials, Semantic Methods for Events and Stories (SEMMES)." 
+funder: + - name: European Commission H2020 + url: https://ec.europa.eu/info/funding-tenders/opportunities/portal/screen/programmes/h2020 +credits: "This project has received funding from the European Union’s Horizon 2020 research and innovation programme under grant agreement GA101004746. The communication reflects only the author’s view and the Research Executive Agency is not responsible for any use that may be made of the information it contains." +has-part: +- meetups-knowledge-graph +- meetups-ontology +- meetups-data-cleaning +- meetups-themes +- meetups-entity-recognition +- meetups-time-extraction +- meetups-corpus-collection +- meetups-coreference +- meetups-hm-identification +--- + +# MEETUPS PILOT + +[![DOI](https://zenodo.org/badge/436452967.svg)](https://zenodo.org/badge/latestdoi/436452967) + +Extracting information of musical artist from Wikipedia pages. + +The initial collection of Wikipedia pages can be found here: https://github.com/albamoralest/scrappingWikipediaMusicBiographies + +In this pilot we extract people, places, events and time entities from 1002 wikipedia pages. + +Step 1: cleaning and organising information by sentences + +Step 2: + + 2.1 extracting entities using DBpedia spotlight + + 2.2 extracting time expressions using nltk + +The dataset is organised by folders: + +The folder extractedEntities contains the final result of the extraction pipeline + +Here you will find a csv named by the wikipedia id of the authors. 
In the file "infoBiographies.csv" you will find the name of the person related with the wikipedia page, the date of birth + + +![MEETUPS software-tools](https://github.com/polifonia-project/meetups-knowledge-graph/blob/main/meetups-software_tools.png?raw=true "MEETUPS software tools by task") \ No newline at end of file diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/meetups_pilot/README_coreference.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/meetups_pilot/README_coreference.md new file mode 100644 index 00000000..2d5bfd52 --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/meetups_pilot/README_coreference.md @@ -0,0 +1,59 @@ +--- +component-id: meetups-coreference +type: Software +name: MEETUPS - Identification of temporal knowledge +brief-description: "This tool is part of the MEETUPS pilot and executes the coreference resolution task. It is in charge of identifying mentions to entities in the form of noun phrase, named, or pronominal text, particularly people and places. This software supports the identification of missing entities during the entity recognition and linking task and leverages the possibility of identifying historical meetups. Furthermore, the software tool validates that these mentions refer to the a named entity and link them to DBpedia or Wikipedia resources." 
+work-package: +- WP4 +pilot: +- MEETUPS +project: polifonia-project +resource: https://github.com/polifonia-project/meetups_pilot/blob/main/05_Coreference.ipynb +release-date: 31/08/2023 +release-number: v0.1 +release-link: https://github.com/polifonia-project/meetups_pilot/releases/tag/v0.3 +doi: https://zenodo.org/badge/latestdoi/436452967 +changelog: https://github.com/polifonia-project/meetups_pilot/releases/tag/v0.3 +licence: +- Apache-2.0 +copyright: "Copyright (c) 2023 MEETUPS @ The Open University" +contributors: +- Alba Morales Tirado +- Enrico Daga +related-component: +- informed-by: + - meetups-entity-recognition + - meetups-time-extraction + - meetups-themes +--- + +# MEETUPS - coreference resolution + +This tool is part of the MEETUPS pilot and executes the coreference resolution task. It is in charge of identifying mentions to entities in the form of noun phrase, named, or pronominal text, particularly people and places. This software supports the identification of missing entities during the entity recognition and linking task and leverages the possibility of identifying historical meetups. Furthermore, the software tool validates that these mentions refer to the a named entity and link them to DBpedia or Wikipedia resources. + + +This component processes text at paragraph level and annotates entities at sentence and paragraph level. It is developed using Python and uses the spaCy library https://spacy.io/universe/project/coreferee/. +This library can be integrated as a plugin in the spaCy's pipeline for Natural Language Processing. The coreferee library resolves coreferences within English text and uses a mixture of neural networks and programmed rules https://github.com/richardpaulhudson/coreferee. +Furthermore, this component validated the named entity exists and link it to a DBpedia or Wikipedia resource. 
+ +### Information on installation and setup + + - Jupyter Notebook: + 05_Coreference.ipynb + +### Details of the data + + Code location: + |_ 05_Coreference.ipynb + + Data input: + |_ indexedSentences/ + + Coreference annotations + Data ouput: + |_ meetupsCorefOutputPP/ + + + |_ README_meetups-coreference.md + + diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/meetups_pilot/README_data_cleaning.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/meetups_pilot/README_data_cleaning.md new file mode 100644 index 00000000..0e488f49 --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/meetups_pilot/README_data_cleaning.md @@ -0,0 +1,69 @@ +--- +component-id: meetups-data-cleaning +type: Software +name: MEETUPS preparation - data cleaning +description: "This tool is part of the corpus preparation process and it is used to clean data collected from Wikipedia." +work-package: +- WP4 +pilot: +- MEETUPS +project: polifonia-project +resource: https://github.com/polifonia-project/meetups_pilot/blob/main/01_CleaningText.ipynb +release-date: 20/07/2022 +release-number: v0.1 +release-link: https://github.com/polifonia-project/meetups_pilot/releases/tag/v0.2 +doi: https://zenodo.org/badge/latestdoi/436452967 +changelog: https://github.com/polifonia-project/meetups_pilot/releases/tag/v0.2 +licence: +- Apache-2.0 +copyright: "Copyright (c) 2023 MEETUPS @ The Open University" +contributors: +- Alba Morales Tirado +- Enrico Daga +related-component: +- persona: + - Ortenz + - David + - Sophie +--- + +# MEETUPS Corpus preparation: data cleaning +### MEETUPS Corpus preparation: Cleaning data collected from Wikipedia web pages of people in the music scene in Europe + + +MEETUPS data cleaning is a tool developed using Python and Jupyter Notebook. 
This software prepares the biographies (collected as text files) in https://github.com/polifonia-project/meetups_corpus_collection for the next step in the extraction of historical meetups process. + +- Use the Wikipedia authors' webpages collected in https://github.com/polifonia-project/meetups_corpus_collection +- Clean text blank lines, sections with no historical meetups data +- Organise the text in sentences as the main unit to extract meetups information + + +#### Information on installation and setup + + - Run Jupyter Notebook 01_CleaningText.ipynb + +#### Details of the data + + Code location: + + |_ 01_CleaningText.ipynb + + Raw corpus location + Data output: + |_ text_dataset/ + + Clean text location + Data input: + |_ cleanText/ + + Index data location + Data output: + |_ indexedParagraphs/ + |_ indexedSentences/ + + |_ README_data_cleaning.md + + +DOI: + +[![DOI](https://zenodo.org/badge/436452967.svg)](https://zenodo.org/badge/latestdoi/436452967) diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/meetups_pilot/README_hm-identification.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/meetups_pilot/README_hm-identification.md new file mode 100644 index 00000000..49fe74d2 --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/meetups_pilot/README_hm-identification.md @@ -0,0 +1,65 @@ +--- +component-id: meetups-hm-identification +type: Software +name: MEETUPS - Historical meetups identification +brief-description: "MEETUPS Historical meetups identification was developed using Python and Jupyter Notebook. As input it uses the bag of entities obtained from the Entity Recognition and Coreference steps. The output is a corpus that contains the text (typically a sentence or a set of sentences), and the list of entities that account for a meetup. The results are stored in CSV files, grouped by biographies. The corpus is used later to build the MEETUPS KG." 
+work-package: +- WP4 +pilot: +- MEETUPS +project: polifonia-project +resource: https://github.com/polifonia-project/meetups_pilot/blob/main/06_Coreference.ipynb +release-date: 31/08/2023 +release-number: v0.1 +release-link: https://github.com/polifonia-project/meetups_pilot/releases/tag/v0.3 +doi: https://zenodo.org/badge/latestdoi/436452967 +changelog: https://github.com/polifonia-project/meetups_pilot/releases/tag/v0.3 +licence: +- Apache-2.0 +copyright: "Copyright (c) 2023 MEETUPS @ The Open University" +contributors: +- Alba Morales Tirado +- Enrico Daga +related-component: +- informed-by: + - meetups-entity-recognition + - meetups-time-extraction + - meetups-themes + - meetups-coreference +--- + +# MEETUPS - Historical meetups identification + +This tool is part of the MEETUPS pilot and processes text from music personalities' biographies to identify historical meetups. The software component gathers the output produced by the tools of Entity Recognition and Coreference \& meetups annotation steps (see Figure) and runs an automatic evaluation to identify if elements that define a historical meetup are present in a piece of text. + +![MEETUPS software-tools](https://github.com/polifonia-project/meetups-knowledge-graph/blob/main/meetups-software_tools.png?raw=true "MEETUPS software tools by task") + +First, the tool examines each piece of text, at sentence level, and evaluates if all the entity types are present. +Sentences that comply with this requirement are automatically annotated as a historical meetup. +All the sentences that have one or more entity type missing are annotated temporarily as a historical trace. The next step is to search for complementary information within the same paragraph and potentially identify more historical meetups. 
+ +The algorithm searches for missing entities in previous sentences within the same paragraph, the aim is to group two or more consecutive sentences that refer to the same event and among them feed all the required entity types of a historical meetup. These sentences should refer to the same type of encounter in order to be considered and not have been previously annotated as a historical meetup. + +MEETUPS Historical meetups identification was developed using Python and Jupyter Notebook. As input it uses the bag of entities obtained from the Entity Recognition and Coreference steps. The output is a corpus that contains the text (typically a sentence or a set of sentences), and the list of entities that account for a meetup. The results are stored in CSV files, grouped by biographies. The corpus is used later to build the MEETUPS KG. + +### Information on installation and setup + + - Jupyter Notebook: + 06_MeetupsIdentification.ipynb + +### Details of the data + + Code location: + |_ 06_MeetupsIdentification.ipynb + + Data input: + |_ indexedSentences/ + + Historical meetups annotations + Data ouput: + |_ meetupsIdentification/ + + + |_ README_hm-identification.md + + diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/meetups_pilot/README_identification_themes.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/meetups_pilot/README_identification_themes.md new file mode 100644 index 00000000..eae30986 --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/meetups_pilot/README_identification_themes.md @@ -0,0 +1,86 @@ +--- +component-id: meetups-themes +type: Software +name: MEETUPS Identification of themes +description: "This tool is part of the MEETUPS pilot and processes text from music personalities' biographies to find encounter types. It uses 'sklearn' and a set of Machine Learning algorithms to classify sentences according to the established type of events. 
The tool extracts information from one of the four elements defining a meetup: the type of encounter (what). Encounter type, along with data of the people involved (who), the place (where) and the time it took place (what), complete the historical meetup information." +work-package: +- WP4 +pilot: +- MEETUPS +project: polifonia-project +resource: https://github.com/polifonia-project/meetups_pilot/blob/main/MeetupType_applyClassifier.ipynb +release-date: 20/07/2022 +release-number: v0.1 +release-link: https://github.com/polifonia-project/meetups_pilot/releases/tag/v0.2 +doi: https://zenodo.org/badge/latestdoi/436452967 +changelog: https://github.com/polifonia-project/meetups_pilot/releases/tag/v0.2 +licence: +- Apache-2.0 +copyright: "Copyright (c) 2023 MEETUPS @ The Open University" +contributors: +- Alba Morales Tirado +- Enrico Daga +related-component: +- informed-by: + - meetups-data-cleaning + - meetups-corpus-collection +--- + +# MEETUPS - Identification of themes + +MEETUPS identification of people and places is a tool developed using Python and Jupyter Notebook. SKLEARN and a set of Machine Learning algorithms to classify sentences according to the established type of events. The tool allows the extraction of one (the type of encounter) of the four elements that define a historical meetup. +The encounter types are music-making, business meetings, personal life, social life, coincidence, public celebration, and education. + +This implementation is divided in three main tasks: +a) Generation of the training dataset +In order to identify and classify sentences according to the encounter type we need first to build a dataset with sentences that describe the different encounter types. 
+Approach: +- Manually prepare seed terms for each meetup type +- Randomly select sentences with those words from the corpus +- Assign the relevant meetup type to each one of those sentences + +b) Training the classifier +Approach: +- Build a balanced training set by selecting first sentences from low represented classes +- Train and test MLPClassifier + +c) Applying the classifier +Use the model tested in b) and infer the type of encounter for all the data in the corpus + +## Information on installation and setup + + - Jupyter Notebook: + MeetupType_applyClassifier.ipynb + +## Details of the data + + Running the Themes classifier: + |_ MeetupType_applyClassifier.ipynb + + Training the Themes classifier: + |_ MeetupType_prototypeSentences.ipynb + + Generating the training dataset: + |_ MeetupType_trainClassifier.ipynb + + Data location + + Data input: + |_ indexedSentences/ + + Data output: + |_ extractedMeetupTypes/ + + Classifier: + meetupType/models/MLPClassifier_2.clf' + + Prototype sentences: + |_ meetupType/prototypeSentences_*.csv + + + |_ README_identification_themes.md + + +## DOI: + +[![DOI](https://zenodo.org/badge/436452967.svg)](https://zenodo.org/badge/latestdoi/436452967) diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/meetups_pilot/README_people_places_identification.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/meetups_pilot/README_people_places_identification.md new file mode 100644 index 00000000..ca9da322 --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/meetups_pilot/README_people_places_identification.md @@ -0,0 +1,80 @@ +--- +component-id: meetups-entity-recognition +type: Software +name: MEETUPS identification of people and places +brief-description: "This tool is part of the MEETUPS pilot and processes text from music personalities' biographies. It uses DBpedia Spotlight to identify and annotate possible entity mentions from input text. 
This is an essential process to identify two of the four main elements that define a meetup: people (who participated) and place (where). Along with data of time (when) the meeting happened and the event that took place (what) complete a historical meetup data point." +work-package: +- WP4 +pilot: +- MEETUPS +project: polifonia-project +resource: https://github.com/polifonia-project/meetups_pilot/blob/main/02_Identify_PP.ipynb +release-date: 20/07/2022 +release-number: v0.1 +release-link: https://github.com/polifonia-project/meetups_pilot/releases/tag/v0.2 +doi: https://zenodo.org/badge/latestdoi/436452967 +changelog: https://github.com/polifonia-project/meetups_pilot/releases/tag/v0.2 +licence: +- Apache-2.0 +copyright: "Copyright (c) 2023 MEETUPS @ The Open University" +contributors: +- Alba Morales Tirado +- Enrico Daga +related-component: +- informed-by: + - meetups-data-cleaning + - meetups-corpus-collection +--- + +# MEETUPS identification of people and places + +MEETUPS identification of people and places is a tool developed using Python and Jupyter Notebook. This software uses DBpedia Spotlight to identify and annotate possible entity mentions from input text. This is an essential process to extract two of the three main elements that define a meetup: people (who participated), place (where) and time (when). + +The implementation is divided into two Jupyter notebooks: + +The first notebook is in charge of querying DBpedia Spotlight, processing the responses (JSON format) and store responses locally. +It uses as input the corpus of music personalities generated by the MEETUPS cleaning component https://github.com/polifonia-project/meetups_pilot/blob/main/01_CleaningText.ipynb + +- Use DBpedia Spotlight to identify and annotate entity mentions from input text. +- Retrieve and process JSON format responses from DBpedia Spotlight +- Store responses for later processing. 
+ +The second notebook uses the responses from DBpedia Spotlight to capture data of people and places: +- Uses the responses from DBpedia captured in the previous notebook +- Search for two three types of entities: + http://dbpedia.org/ontology/Person + http://dbpedia.org/ontology/MusicalArtist + http://dbpedia.org/ontology/Place +- Classify the first two types as "people" and the last one as "place" +- Store the results in extractedEntitiesPersonPlaceOnly/ + +#### Information on installation and setup + + - Jupyter Notebook: + 02_queryDbpedia.ipynb + 02_Identify_PP.ipynb + +#### Details of the data + + Code location: + |_ 02_queryDbpedia.ipynb + |_ 02_Identify_PP.ipynb + + Index data location + Data input: + |_ indexedSentences/ + + DBpedia Spotlight annotations: + |_ cacheSpotlightResponse/ + + People and places annotation + Data output: + |_ extractedEntitiesPersonPlaceOnly/ + + + |_ README_people_places_identification.md + + +DOI: + + TODO diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/meetups_pilot/README_time_expressions.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/meetups_pilot/README_time_expressions.md new file mode 100644 index 00000000..40ad390f --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/meetups_pilot/README_time_expressions.md @@ -0,0 +1,97 @@ +--- +component-id: meetups-time-extraction +type: Software +name: MEETUPS - Identification of temporal knowledge +brief-description: "This tool is part of the MEETUPS pilot and processes text from music personalities' biographies to find time expressions. It uses NLTK and a set of heuristic rules to identify and annotate temporal knowledge from text. The tool extracts information from one out of the four elements that define a meetup: the date or moment in time when it happened (when). 
Time expressions, along with data of the people involved (who), the place (where) and the event that took place (what), complete the historical meetup information." +work-package: +- WP4 +pilot: +- MEETUPS +project: polifonia-project +resource: https://github.com/polifonia-project/meetups_pilot/blob/main/03_Identify_TimeE.ipynb +release-date: 20/07/2022 +release-number: v0.1 +release-link: https://github.com/polifonia-project/meetups_pilot/releases/tag/v0.2 +doi: https://zenodo.org/badge/latestdoi/436452967 +changelog: https://github.com/polifonia-project/meetups_pilot/releases/tag/v0.2 +licence: +- Apache-2.0 +copyright: "Copyright (c) 2023 MEETUPS @ The Open University" +contributors: +- Alba Morales Tirado +- Enrico Daga +related-component: +- informed-by: + - meetups-data-cleaning + - meetups-corpus-collection +--- + +# MEETUPS + +MEETUPS identification of temporal knowledge is a tool developed using Python and Jupyter Notebook. This software uses NLTK Toolkit and heuristic rules to identify and annotate time expressions from input text. The tool allows the extraction of one (when a historical meetup happened) of the four elements that define a historical meetup. + +This implementation is a rule-based Time Expression recognition tagger based on research by Zhong et al. and SynTime software (https://github.com/zhongxiaoshi/syntime). Their work was originally tested using three datasets: TimeBank, WikiWars and Tweets. +The authors implement a three-layer system that recognises time expressions using syntactic token types and general heuristic rules. + +First layer - token identification: + - Annotate tokes with POS tags, we use NLTK. In SynTime they used CoreNLP. + - Annotate tokes according time tokens proposed by Zhong et al. + Three types of tokens: TIME, MODIFIER, NUMERAL. 
Each type has more specific types
For later) + +Finally the tool stores the results as a CSV file in extractedTimeExpressions/ + + +### Information on installation and setup + + - Jupyter Notebook: + 03_Identify_TimeE.ipynb + +### Details of the data + + Code location: + |_ 03_Identify_TimeE.ipynb + + Regular expressions: + |_ timeRegex.txt + + Data location + + Data input: + |_ indexedSentences/ + + Time expressions annotations + Data ouput: + |_ extractedTimeExpressions/ + + + |_ README_people_places_identification.md + + +### DOI: + + TODO diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/music-analysis-ontology/README.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/music-analysis-ontology/README.md new file mode 100644 index 00000000..f045cab2 --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/music-analysis-ontology/README.md @@ -0,0 +1,13 @@ +# modal-tonal-ontology +Ontology dedicated to the modal-tonal organisation of polyphonic works. + +Shield: [![CC BY-NC-SA 4.0][cc-by-nc-sa-shield]][cc-by-nc-sa] + +This work is licensed under a +[Creative Commons Attribution-NonCommercial-ShareAlike 4.0 International License][cc-by-nc-sa]. + +[![CC BY-NC-SA 4.0][cc-by-nc-sa-image]][cc-by-nc-sa] + +[cc-by-nc-sa]: http://creativecommons.org/licenses/by-nc-sa/4.0/ +[cc-by-nc-sa-image]: https://licensebuttons.net/l/by-nc-sa/4.0/88x31.png +[cc-by-nc-sa-shield]: https://img.shields.io/badge/License-CC%20BY--NC--SA%204.0-lightgrey.svg diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/music-meta-ontology/README.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/music-meta-ontology/README.md new file mode 100644 index 00000000..c86f8df6 --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/music-meta-ontology/README.md @@ -0,0 +1,97 @@ +

+ +

+ +# Music Meta + +An ontology for music metadata. + +[![DOI](https://zenodo.org/badge/372536364.svg)](https://zenodo.org/badge/latestdoi/372536364) +[![License: CC BY 4.0](https://img.shields.io/badge/License-CC_BY_4.0-lightgrey.svg)](https://creativecommons.org/licenses/by/4.0/) + +> 🔗 Ontology URI: [https://w3id.org/polifonia/ontology/music-meta/](https://w3id.org/polifonia/ontology/music-meta/) + +Music Meta is a **rich** and **flexible** semantic model to describe **music metadata** related to artists, compositions, performances, recordings, broadcasts, and links. Music Meta provides an **abstraction** layer to represent (Western) music metadata across different genres and periods, for various stakeholders and music datasets. The ontology is thus designed to be specialised to the specific contexts of application (see e.g. the [Tunes](https://github.com/polifonia-project/tunes-ontology/tree/main) and [CoMeta](https://github.com/polifonia-project/cometa-ontology) ontologies) and is part of the [Polifonia Ontology Network](https://github.com/polifonia-project/ontology-network). + +Music Meta focuses on provenance and interoperability – es- sential requirements for the integration of music datasets, which is currently hampered by the specificity of existent ontologies. The model is based on the Information-Realisation ODP [1], allowing to reduce complexity of FRBR-based models, whose application in the music domain has raised concerns [2]. +To enable data integration from existing knowledge bases and datasets, we also align Meta to Music Ontology [3], DOREMUS [4], and Wikidata. To facil- itate the reuse of Music Meta and its data conversion into OWL/RDF Knowl- edge Graphs, we developed a library to map arbitrary music metadata into RDF triples. This enables a practical and scalable workflow for data lifting to create Music Knowledge Graphs without expert knowledge of our ontological model. 
+ +![overview](diagrams/music_meta.png) + +**How?** We follow eXtreme Design methodologies and best practices for data engineering [5], to reflect the perspectives and the requirements of various stakeholders into the design of the model, while leveraging ontology design patterns and accounting for provenance at different levels (claims, links). We provide a first evaluation of the model, alignments to other schema (Music Ontology [6], DOREMUS [7], Wikidata), and support for data transformation. + +**Why another ontology for music metadata?** The interoperability of metadata is an essential requirement for the integration of music datasets, which is curently hampered by the specificity of existent ontologies. The semantic description of music metadata is indeed a key requirement for the creation of music datasets that can be aligned, integrated, and accessed for information retrieval and knowledge discovery. It is nonetheless an open challenge due to the complexity of musical concepts arising from different genres, styles, and time periods – hence requiring a lingua franca to accommodate various stakeholders (music librarians, musicologists, music analysts, cataloguers, data engineers, etc.). + +> :information_source: Check out [our website](https://polifonia-project.github.io/music-meta-ontology/) for more documentation and examples. + + + +## Competency questions addressed + +- Which is the composer of a musical piece? + - Is the composer of a musical piece known? +- Which are the members of a music ensemble? +- Which role a music artist played within a music ensemble? +- In which time interval has a music artist been a member of a music ensemble? +- Where was a music ensemble formed? +- Which award was a music artist nominated for? +- Which award was received by a music artist? +- Which music artists has a music artist been influenced by? +- Which music artist has a music artist collaborated with? +- Which is the start date of the activity of a music artist? 
+- Which is the end date of the activity of a music artist? +- Which is the name of a music artist? +- Which is the alias of a music artist? +- Which is the language of the name/alias of a music artist? +- Which music dataset has a music algorithm been trained on? +- Which is the process that led to the creation of a musical piece? +- In which time interval did the creation process took place? +- Where did the creation process took place? +- Which are the creative actions composing the creation process of a musical piece? +- Which task was executed by a creative action? +- Which are the parts of a musical piece? +- Which collection is a musical piece member of? +- Where was a musical piece performed? +- When was a musical piece performed? +- Which music artists took part to a musical performance? +- Which is the recording process that recorded a musical performance? +- Which is the recording produced by a recording process? + + +## Examples of SPARQL queries addressed +- Which are the members of a music ensemble? +``` +PREFIX mm: +PREFIX core: +SELECT DISTINCT ?musicEnsembleMember +WHERE { ?musicEnsembleMember core:isMemberOf ?musicEnsemble . +?musicEnsemble rdf:type mm:MusicEnsemble . +} +``` + +- Which role a music artist played within a music ensemble? +``` +PREFIX mm: +PREFIX core: +SELECT DISTINCT ?musicEnsembleMember ?musicEnsembleMemberRole +WHERE { ?musicEnsembleMember mm:isMemberOfMusicEnsembleInvolvedIn ?musicEnsembleMembership . +?musicEnsembleMembership rdf:type mm:MusicEnsembleMembership ; +core:involvesRole ?musicEnsembleMemberRole . +} +``` + +## References + +[1] Gangemi, A., Peroni, S.: The Information Realization Pattern. In: Hitzler, P., Gangemi, A., Janowicz, K., Krisnadhi, A., Presutti, V. (eds.) Ontology Engineer- ing with Ontology Design Patterns - Foundations and Applications, Studies on the Semantic Web, vol. 25, pp. 299–312. IOS Press (2016). 
+ +[2] Riley, J.: Application of the Functional Requirements for Bibliographic Records (FRBR) to Music. In: ISMIR. pp. 439–444 (2008) + +[3] Raimond, Y., Abdallah, S., Sandler, M., Giasson, F.: The music ontology. In: Pro- ceedings of the 8th International Conference on Music Information Retrieval (IS- MIR 2007). Vienna, Austria (Sep 2007) + +[4] Allik, A., Fazekas, G., Sandler, M.B.: An ontology for audio features. In: Proceed- ings of the 17th International Society for Music Information Retrieval Conference, ISMIR 2016, New York City, United States, August 7-11, 2016 (2016) + +[5] E. Blomqvist, K. Hammar, and V. Presutti, “Engineering Ontologies with Patterns-The eXtreme Design Methodology.” Ontology Engineering with Ontology Design Patterns, no. 25, 2016. + +[6] Raimond, Y., Abdallah, S. A., Sandler, M. B., & Giasson, F. (2007, September). The Music Ontology. In ISMIR (Vol. 2007, p. 8th). + +[7] Achichi, M., Lisena, P., Todorov, K., Troncy, R., & Delahousse, J. (2018). DOREMUS: A graph of linked musical works. In The Semantic Web–ISWC 2018: 17th International Semantic Web Conference, Monterey, CA, USA, October 8–12, 2018, Proceedings, Part II 17 (pp. 3-19). Springer International Publishing. 
\ No newline at end of file diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/music-meta-ontology/assets/ecosystem/meta_ontology_header.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/music-meta-ontology/assets/ecosystem/meta_ontology_header.md new file mode 100644 index 00000000..372e8ce9 --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/music-meta-ontology/assets/ecosystem/meta_ontology_header.md @@ -0,0 +1,73 @@ +--- +component-id: https://w3id.org/polifonia/ontology/music-meta/ +type: Ontology +name: Music Meta +description: An ontology to describe music metadata +image: diagrams/music_meta.png +logo: website/static/img/musicmeta_logo.png +work-package: +- WP2 +pilot: +- INTERLINK +- TUNES +- FACETS +- TONALITIES +project: polifonia-project +resource: ontology/musicmeta.owl +release-date: 13/04/2023 +release-number: v1.0 +release-link: https://github.com/polifonia-project/ontology-network/releases +doi: 10.5281/zenodo.7919970 +changelog: https://github.com/polifonia-project/ontology-network/releases +licence: +- CC-BY_v4 +copyright: "Copyright (c) 2023 Music Meta Contributors" +contributors: # replace these with the GitHub URL of each contributor +- Jacopo de Berardinis +- Andrea Poltronieri +- Valentina Anita Carriero +- Nicolas Lazzari +- Peter van Kranenburg +- Philippe Rigaux +- Mari Wigham +- Marco Gurrieri +related-components: +- informed-by: + - polifoniacq-dataset +- reuses: # any reused/imported ontology + - https://w3id.org/polifonia/ontology/core/ +- story: # any related story this ontology addresses + - Linka#1_MusicKnowledge # TODO Add more +- persona: # any persona this ontology addresses + - Linka +--- + + + +# Music Meta Ontology + +The Music Meta module provides a rich and flexible ontology to describe music +metadata related to artists, compositions, performances, recordings, +casts, and links. 
Music Meta focuses on provenance and interoperability – +essential requirements for the integration of music datasets, which is currently +hampered by the specificity of existent ontologies. The model is based on the +Information-Realisation ODP, allowing to reduce complexity of FRBR-based +models, whose application in the music domain has raised concerns. +To enable data integration from existing knowledge bases and datasets, we also +align Meta to Music Ontology, DOREMUS, and Wikidata. To facil- itate +the reuse of Music Meta and its data conversion into OWL/RDF Knowl- edge Graphs, +we developed a library to map arbitrary music metadata into RDF triples. This +enables a practical and scalable workflow for data lifting to create Music +Knowledge Graphs without expert knowledge of our ontological model. + +[Link to the website](https://github.com/polifonia-project/music-meta-ontology) \ No newline at end of file diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/music-meta-ontology/assets/ecosystem/pymusicmeta_header.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/music-meta-ontology/assets/ecosystem/pymusicmeta_header.md new file mode 100644 index 00000000..b91123ca --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/music-meta-ontology/assets/ecosystem/pymusicmeta_header.md @@ -0,0 +1,51 @@ +--- +component-id: pymusicmeta +type: Software +name: pymusicmeta +description: A library to create Music Meta resources on the Web. 
+logo: website/static/img/musicmeta_logo.png +work-package: +- WP2 +pilot: +- INTERLINK +- TUNES +- FACETS +- TONALITIES +project: polifonia-project +resource: https://github.com/polifonia-project/music-meta-ontology/tree/main/musicmeta +release-date: 13/04/2023 +release-number: v1.0 +release-link: https://github.com/polifonia-project/music-meta-ontology +doi: 10.5281/zenodo.7919970 +changelog: https://github.com/polifonia-project/music-meta-ontology +licence: +- IscLicense +copyright: "Copyright (c) 2023 Music Meta Contributors" +contributors: # replace these with the GitHub URL of each contributor +- Jacopo de Berardinis +- Andrea Poltronieri +related-components: +- informed-by: + - https://w3id.org/polifonia/ontology/music-meta/ +- reuses: # any reused/imported ontology + - https://w3id.org/polifonia/ontology/music-meta/ +- documentation: # link any resource providing documentation for this ontology + - https://w3id.org/polifonia/ontology/music-meta/ +--- + +# pymusicmeta + +To facilitate the reuse of Music Meta and its data conversion into OWL/RDF +Knowledge Graph, we developed a library to map arbitrary music metadata into RDF +triples. This enables a practical and scalable workflow for data lifting to +create Music Knowledge Graphs without expert knowledge of our ontological model. +The library is developed in Python as an extension of RDF-Lib. + +The Music Meta library allows for the creation of RDF triples from textual data, +offering the advantage of easy data generation using our model. The library +provides a range of simple methods for adding triples to a graph, using clear, +concise documentation and straightforward naming conventions. With each triple +added, the library automatically adds alignments to other schema that Music Meta +supports, thus bringing interoperability with ontologies. 
+ +[Link to the website](https://github.com/polifonia-project/music-meta-ontology) \ No newline at end of file diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/music-meta-ontology/website/README.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/music-meta-ontology/website/README.md new file mode 100644 index 00000000..5c26929d --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/music-meta-ontology/website/README.md @@ -0,0 +1,3 @@ +# Website + +This website is built using [Docusaurus 2](https://docusaurus.io/), a modern static website generator. diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/music-meta-ontology/website/docs/intro.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/music-meta-ontology/website/docs/intro.md new file mode 100644 index 00000000..4725ffa4 --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/music-meta-ontology/website/docs/intro.md @@ -0,0 +1,3 @@ +# Disclaimer + +The website has been moved! \ No newline at end of file diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/music-meta-ontology/website/src/pages/markdown-page.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/music-meta-ontology/website/src/pages/markdown-page.md new file mode 100644 index 00000000..9756c5b6 --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/music-meta-ontology/website/src/pages/markdown-page.md @@ -0,0 +1,7 @@ +--- +title: Markdown page example +--- + +# Markdown page example + +You don't need React to write simple standalone pages. 
diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/musicbo-knowledge-graph/README.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/musicbo-knowledge-graph/README.md new file mode 100644 index 00000000..79930664 --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/musicbo-knowledge-graph/README.md @@ -0,0 +1,63 @@ +--- +component-id: musicbo-knowledge-graph +name: MusicBO Knowledge Graph +description: The Knowledge Graph about the role of Bologna in the European musical landscape. +type: KnowledgeGraph +release-date: 12/02/2023 +release-number: v0.1 +work-package: +- WP4 +licence: CC-BY_v4 +links: +- https://github.com/polifonia-project/musicbo-knowledge-graph +credits: +- https://github.com/valecarriero +- https://github.com/FiorelaCiroku +- https://github.com/roccotrip +- https://github.com/arianna-graciotti +- https://github.com/EleonoraMarzi +--- + +# musicbo-knowledge-graph +MusicBO Knowledge Graph stores information about the role of music in the city of Bologna from a historical and social perspective. It aims to satisfy the requirements of MusicBO pilot use case, namely conveying knowledge about music performances in Bologna and encounters between musicians, composers, critics and historians who passed through Bologna. + +MusicBO Knowledge Graph is available via the **[MusicBO SPARQL endpoint](https://polifonia.disi.unibo.it/musicbo/sparql)**. + +MusicBO Knowledge Graph is automatically extracted from natural language texts by applying a custom text-to-Knowledge Graph (text2KG) process to the MusicBO corpus documents. The MusicBO corpus is part of the **[Polifonia Corpus](https://github.com/polifonia-project/Polifonia-Corpus)**. + +The process leverages two modules: the **[Polifonia Knowledge Extractor](https://github.com/polifonia-project/Polifonia-Knowledge-Extractor)** pipeline and the **[AMR2Fred](https://github.com/infovillasimius/amr2Fred)** tool. 
The first one uses **[AMR](https://github.com/amrisi/amr-guidelines/blob/master/amr.md)** (Abstract Meaning Representation) to parse sentences into semantic graphs. The second one transforms AMR graphs into RDF/OWL KGs based on **[FRED](http://wit.istc.cnr.it/stlab-tools/fred/demo/)** logic form by exploiting the similarities between AMR graphs and FRED's output representation, such as being both graph-based and event-centric. The Polifonia Knowledge Extractor pipeline provides input to the **[AMR2Fred](https://github.com/infovillasimius/amr2Fred)** tool. The two modules are orchestrated by the **[Machine Reading suite](https://github.com/anuzzolese/machine-reading)**, which queries both components through the **[Text-to-AMR-to-FRED API](http://framester.istc.cnr.it/txt-amr-fred/api/docs)** and generates RDF named graphs from natural language text. + +The Text2KG process for the automatic creation of the MusicBO KG can be broken down into its main steps as follows: + +1. **[Input.]** For the scope of this Deliverable, we applied our text2KG process to the English and Italian language documents of MusicBO corpus. We took as input 47 documents in English and 51 documents in Italian from the MusicBO corpus. +2. **[Pre-processing.]** The MusicBO corpus documents that we chose as input were originally in .PDF, image or .docx formats. Therefore, we needed to extrapolate the plain text from them, leveraging ad hoc Optical Character Recognition (OCR) technologies from **[textual-corpus-population](https://github.com/polifonia-project/textual-corpus-population)** when necessary. We then performed co-reference resolution: for English language documents, we implemented a co-reference resolution pipeline based on Spacy's **[neuralcoref](https://spacy.io/universe/project/neuralcoref)**. 
We have not yet implemented any co-reference resolution procedure for the Italian language documents, as we are still evaluating the performances of state-of-the-art Italian language co-reference resolution tools. We also performed rule-based minimal post-OCR corrections and sentence splitting on the extrapolated plain texts. +3. **[Text2AMR Parsing.]** The sentences resulting from the pre-processing steps described at point 2 above are submitted to state-of-the-art neural text-to-AMR parsers. MR has gained significant attention in recent years as a meaning representation formalism, given its ability to abstract away from syntactic variability and its potential to act as an interlingua in scenarios that encompass multilingual textual sources. For sentences in English we used **[SPRING](http://nlp.uniroma1.it/spring/)**. For sentences in Italian, we used **[USeA](https://github.com/SapienzaNLP/usea)**. +4. **[Filtering.]** This step is a preliminary tentative to tackle AMR graphs evaluation. Given that we are concentrating on non-standard texts (historical documents), the results of the state-of-the-art AMR parsers may be inaccurate. Human validation is time-consuming, and there are no standard benchmarks for the semantic parsing of historic and OCRed text. For this reason, we decided to use a back-translation approach that converts the generated AMR graphs back to sentences (AMR2text) to compute similarity scores between the original sentence and the generated one. For English, we used **[SPRING](http://nlp.uniroma1.it/spring/)** for AMR2Text generation and computed **[BLEURT](https://github.com/google-research/bleurt)** as a similarity score. For Italian, we used **[m-AMR2Text](https://github.com/UKPLab/m-AMR2Text)** for AMR2Text generation. Then, we computed the cosine similarity between the embedding of the original and the generated sentences. 
We generated the embeddings by leveraging **[LASER embeddings](https://github.com/yannvgn/laserembeddings)**, an off-the-shelf multilingual sentence embedding toolkit. We hypothesise that generated sentences with high BLEURT or cosine similarity scores correspond to high-quality graphs. We decided to discard all the graphs in our English AMR graphs bank corresponding to AMR2Text-generated sentences with a negative BLEURT score. With regard to our Italian AMR graphs bank, we decided to discard the graphs associated with AMR2Text-generated sentences having a cosine similarity <0,90. In fact, according to our sample-based qualitative error analysis, negative BLEURT scores and cosine similarity <0,90 corresponded to low-quality generated sentences and, consequentially, to low-quality AMR graphs. The quality issues observed in the AMR graphs correlated with input sentences affected, for example, by severe OCR errors. +5. **[AMR2Fred translation.]** Finally, we transformed the graphs filtered at step above into OWL/RDF Knowledge Graphs that follow **[FRED](http://wit.istc.cnr.it/stlab-tools/fred/demo/)** knowledge representation patterns. This transformation is done by querying the AMR2Fred tool via the **[Machine Reading](https://github.com/anuzzolese/machine-reading)** suite. The output is named graphs produced by using the **[N-Quad](https://www.w3.org/TR/n-quads/)** syntax. Named graphs allow for extending the standard RDF triple model with a "context" element which, among the other features, allows the association of each triple with information about their provenance. In our case, the context element of MusicBO Knowledge Graph triples indicates which sentence's graph the triple is part of. At this step, we enrich the resulting FRED-like RDF/OWL KGs using **[Framester](http://etna.istc.cnr.it/framester_web/)** semantic hub. 
In fact, thanks to Framester, the information implicitly enclosed in the text could be unveiled by integrating knowledge from different knowledge bases such as [FrameNet](https://framenet.icsi.berkeley.edu/fndrupal/), [WordNet](https://wordnet.princeton.edu/), [VerbNet](https://verbs.colorado.edu/verbnet/), [BabelNet](https://babelnet.org/), [DBPedia](https://www.dbpedia.org/), [Yago](https://www.mpi-inf.mpg.de/departments/databases-and-information-systems/research/yago-naga/yago/), [DOLCE-Zero](http://www.ontologydesignpatterns.org/ont/d0.owl) and other resources. In particular, we enrich the FRED-like RDF/OWL KGs with Word Sense Disambiguation (WSD) information. The WSD process currently implemented applies to those elements of the FRED-like RDF/OWL KGs which correspond to nodes of the AMR graphs that are not linked to any lexical resources or knowledge bases, namely all the AMR graphs nodes that are not treated as **[PropBank](http://propbank.github.io/)** predicates or named entities. The implemented WSD process consists of submitting the sentence associated with the FRED-like RDF/OWL KG to **[EWISER](https://github.com/SapienzaNLP/ewiser)**, a WSD system, and of associating the resulting WSD information with the AMR2Fred nodes whose corresponding labels in the AMR graph matches the lemmas of the processed sentence (if the graph's node is among those that need to be enriched with WSD information). We leverage [WordNet](https://wordnet.princeton.edu/) as the lexical resource from which we take the word senses information. + +We provide in folder "input_csv\" of this repository the input CSV that contains the pre-processed and filtered EN sentences of the MusicBO corpus (steps 1-4 of the process described above). The CSV is ready to be sent as input to the **[Machine Reading](https://github.com/anuzzolese/machine-reading)** suite, to enable the creation of named graphs as per step 5 of the Text2KG process. 
The CSV is made of 6 columns: + +* corpus_id, which is an identifier for a corpus; +* document_id, which is an identifier of a document within a corpus; +* sentence_id, which is an identifier of document sentence within a corpus; +* content, which is the content of sentence to process; +* document_uri, which is a link to a Web page from which the document of a corpus can be retrieved; +* corpus_uri, which is the DOI of a corpus. + +Here's an excerpt of the csv file: + +|corpus_id|document_id|sentence_id|content |document_uri |corpus_uri | +|---------|-----------|-----------|----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|--------------------------------------------------|--------------------------------------| +|MusicBO |1 |1009 |The more I deviated from the path which my wife regarded as the only profitable one, due partly to the change of my views (which I grew ever less willing to communicate to her), and partly to the modification in my attitude towards the stage, the more my wife retreated from that position of close fellowship with me which my wife had enjoyed in former years, and which my wife thought herself justified in connecting in some way with my successes.|https://freeditorial.com/en/books/my-life-volume-1|https://doi.org/10.5281/zenodo.6672165| +|MusicBO |4 |363 |And "off and on" we should be sure to undertake something to give vent to our energies in the outer world. 
|https://www.gutenberg.org/cache/epub/4234/pg4234.txt|https://doi.org/10.5281/zenodo.6672165| +|MusicBO |35 |28 |To this Artusi replied in Considerationi musicali, printed in Seconda parte dell'Artusi (1603), mockingly dedicated to Bottrigari. |https://doi.org/10.1093/gmo/9781561592630.article.01383|https://doi.org/10.5281/zenodo.6672165| + +We provide in folder "data\" of this repository the KGs obtained through our Text2KG process described above. Stats of the KGs latest release can be found in the table below: + +| **Languages** | #sent-AMR graph pairs (Text2AMR) | #filtered sent-AMR graph pairs (Automatic metrics evaluation) | #named graphs (AMR2RDF) | #triples | +|---------------|----------------------------------|---------------------------------------------------------------|-------------------------|----------| +| **EN** | 51.814 | 5.965 | 5.798 | 410.132 | +| **ITA** | 10.563 | 1.815 | 1.759 | 118.162 | +| **TOTAL** | 15.747 | 7.780 | 7.557 | 531.222 | + +MusicBO Knowledge Graph is described in a dedicated **[MELODY data story](https://projects.dharc.unibo.it/melody/musicbo/music_in_bologna_knowledge_graph_overview)**. diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/ontology-network/README.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/ontology-network/README.md new file mode 100644 index 00000000..2e851894 --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/ontology-network/README.md @@ -0,0 +1,61 @@ +# Polifonia Ontology Network (PON) + +This repository contains references to the Polifonia Ontology Network. 
+ +[![DOI](https://zenodo.org/badge/372536364.svg)](https://zenodo.org/badge/latestdoi/372536364) +[![License: CC BY 4.0](https://img.shields.io/badge/License-CC_BY_4.0-lightgrey.svg)](https://creativecommons.org/licenses/by/4.0/) + +> 🔗 Ontology URI: [https://w3id.org/polifonia/ontology/ontology-network/](https://w3id.org/polifonia/ontology/ontology-network/) + +The Polifonia Ontology Network (PON) provides a modular backbone of music +ontologies to address both cultural heritage and more general queries in the +music domain. As illustrated in the diagram below, `PON v1.0` comprises 15 +ontology modules that are organised thematically (colours, horizontal view) and +hierarchically, to highlight their dependencies (vertical view). At the bottom +of the architecture lies our `Core` module (providing general-purpose elements +of design, ODPs, and alignments) and the reused ontologies. Four foundational +models provide interoperability across PON through their abstract design: +`Source`, `Instrument`, `Music Meta`, and `Music Representation`. These are +specialised and extended in the upper levels to add functionalities and +contextualise specific domains. + +![overview](assets/pon_architecture.png) + +A summary of PON modules is given in the table below, with links to the +repositories storing the modules with documentation, diagrams, and examples. +Through our foundational models, PON ontologies can be applied to a wide set of +music projects, and the modular design simplifies extensibility and maintenance.
+ + +## Ontology modules of Polifonia Ontology Network + +| **Ontology** | **Prefix** | **Description** | **URI** | **Repository** | +|--------------------------|------------|------------------------------------------------------------------------------------------------|-----------------------------------------------------------|--------------------------------------------------------------------| +| **FULL** | pon: | The FULL ontology module stored in this repository, which imports all the modules listed here. | https://w3id.org/polifonia/ontology/ontology-network/ | https://github.com/polifonia-project/ontology-network/ | +| **Core** | core: | Elements of general reuse and ontology design patterns. | https://w3id.org/polifonia/ontology/core/ | https://github.com/polifonia-project/core-ontology | +| **Music Meta** | mm: | Achieving interoperability of music metadata. | https://w3id.org/polifonia/ontology/music-meta/ | https://github.com/polifonia-project/musicmeta-ontology | +| **Music Representation** | mr: | Foundational model to describe arbitrary musical content. | https://w3id.org/polifonia/ontology/music-representation/ | https://github.com/polifonia-project/music-representation-ontology | +| **Music Instrument** | mop: | Instruments and their evolution through time and space. | https://w3id.org/polifonia/ontology/music-instrument/ | https://github.com/polifonia-project/music-instrument-ontology | +| **Source** | src: | Representing musical sources and their context of production. | https://w3id.org/polifonia/ontology/source/ | https://github.com/polifonia-project/source-ontology | +| **Tunes** | tunes: | A specialisation of Music Meta for folk music. | https://w3id.org/polifonia/ontology/tunes/ | https://github.com/polifonia-project/tunes-ontology | +| **CoMeta** | com: | An extension of Music Meta to represent music corpora. 
| https://w3id.org/polifonia/ontology/cometa/ | https://github.com/polifonia-project/cometa-ontology | +| **Music Projection** | mp: | Achieving interoperability of music notation systems. | https://w3id.org/polifonia/ontology/music-projection/ | https://github.com/polifonia-project/music-projection-ontology | +| **Organs** | organ: | A rich descriptive model of organs and building methods. | https://w3id.org/polifonia/ontology/organs/ | https://github.com/polifonia-project/organs-ontology | +| **Bells** | bell: | Describing bells, bell towers and bell ringers. | https://w3id.org/polifonia/ontology/bells/ | https://github.com/polifonia-project/bells-ontology | +| **Music Algorithm** | mx: | Computational methods for music and their parametrisation. | https://w3id.org/polifonia/ontology/music-algorithm/ | https://github.com/polifonia-project/music-algorithm-ontology | +| **Music Analysis** | ma: | Music analysis through reasoning using modal-tonal theories. | https://w3id.org/polifonia/ontology/music-analysis/ | https://github.com/polifonia-project/music-analysis-ontology | +| **Music Annotation** | ann: | A wrapper of ontologies for music annotations (audio, symbolic). | https://w3id.org/polifonia/ontology/music-annotation/ | https://github.com/polifonia-project/music-annotation-ontology | + + +## Contributing + +🙌 We encourage you to explore and leverage the Polifonia Ontology Network v1.0 +to enhance your music-related applications, research, and collaborations. Your +feedback and contributions are highly appreciated as we continue to refine and +expand this ontology network. + +Let the power of ontologies unlock new possibilities in the world of music! 🎵✨ + +## License + +This work is licensed under a [Creative Commons Attribution 4.0 International License](http://creativecommons.org/licenses/by/4.0/). 
diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/ontology-network/checklist.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/ontology-network/checklist.md new file mode 100644 index 00000000..a8b8429c --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/ontology-network/checklist.md @@ -0,0 +1,37 @@ + +# PON checklist for modules + +> Examples of how an ontology module in PON should look are the [JAMS ontology](https://github.com/polifonia-project/jams-ontology) and the [BELL ontology](https://github.com/polifonia-project/bell-ontology). + + +## :red_circle: Must have + +[ ] Create the main OWL file in the `ontology/` folder. + +[ ] Create a README for the ontology (compliant with our [rulebook](https://github.com/polifonia-project/rulebook)). + +[ ] Make sure the ontology is published at a persistent URI. See how to implement redirect rules. + +[ ] Create example data for ontology testing, using the competency questions. + +[ ] Provide a licence specification for the ontology. + + +## :orange_circle: Should have + +[ ] Include high-quality documentation for reuse, with diagrams, examples and tutorials. + +[ ] Provide a canonical citation associated with the resource. + +[ ] If applicable, please include links to any KG (or other projects) that was developed using this ontology. + +[ ] Document on reuse/extension of resources (including upper ontologies and/or patterns). + + +## :yellow_circle: Nice to have + +[ ] Does the ontology include any data transformation utilities? + +[ ] Specify if the ontology is of **specific** pilot reuse, or has more **general** applicability. If the latter applies, please provide some examples or explain how this is possible. + +[ ] Specify if there is potential for extensibility to meet future requirements.
diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/ontology-network/header.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/ontology-network/header.md new file mode 100644 index 00000000..5d94a773 --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/ontology-network/header.md @@ -0,0 +1,74 @@ +--- +component-id: https://w3id.org/polifonia/ontology/ontology-network/ +type: Ontology +name: Polifonia Ontology Network (PON) +description: Top level description of the ontology modules in the Polifonia Ontology Network, with pointers to each submodule. +image: assets/pon_architecture.png +work-package: +- WP2 +pilot: +- INTERLINK +- TUNES +- FACETS +- TONALITIES +- MEETUPS +- MUSICBO +- CHILD +- ORGANS +- BELLS +- TUNES +- ACCESS +project: polifonia-project +resource: ontology/ontology-network.owl +release-date: 09/05/2023 +release-number: v1.0 +release-link: https://github.com/polifonia-project/ontology-network/releases +doi: 10.5281/zenodo.7919970 +changelog: https://github.com/polifonia-project/ontology-network/releases +licence: +- CC-BY_v4 +copyright: "Copyright (c) 2023 Polifonia Ontology Network Contributors" +contributors: # replace these with the GitHub URL of each contributor +- Valentina Anita Carriero +- Andrea Poltronieri +- Nicolas Lazzari +- Jacopo de Berardinis +- Fiorela Ciroku +related-components: +- informed-by: + - https://github.com/polifonia-project/polifoniacq-dataset + - https://github.com/polifonia-project/idea +- reuses: +- https://w3id.org/polifonia/ontology/core/ +- https://w3id.org/polifonia/ontology/music-meta/ +- https://w3id.org/polifonia/ontology/music-representation/ +- https://w3id.org/polifonia/ontology/source/ +- https://w3id.org/polifonia/ontology/instrument/ +- https://w3id.org/polifonia/ontology/tunes/ +- https://w3id.org/polifonia/ontology/music-projection/ +- https://w3id.org/polifonia/ontology/organs/ +- https://w3id.org/polifonia/ontology/cometa/ +- 
https://w3id.org/polifonia/ontology/bells/ +- https://w3id.org/polifonia/ontology/music-algorithm/ +- https://w3id.org/polifonia/ontology/music-analysis/ +- https://w3id.org/polifonia/ontology/music-annotation/ +- documentation: + - https://github.com/polifonia-project/ontology-network # FIXME +--- +# Polifonia Ontology Network (PON) + +The Polifonia Ontology Network (PON) provides a modular backbone of music +ontologies to address both cultural heritage and more general queries in the +music domain. As illustrated in the diagram below, `PON v1.0` comprises 15 +ontology modules that are organised thematically (colours, horizontal view) and +hierarchically, to highlight their dependencies (vertical view). At the bottom +of the architecture lies our `Core` module (providing general-purpose elements +of design, ODPs, and alignments) and the reused ontologies. Four foundational +models provide interoperability across PON through their abstract design: +`Source`, `Instrument`, `Music Meta`, and `Music Representation`. These are +specialised and extended in the upper levels to add functionalities and +contextualise specific domains. + +![overview](assets/pon_architecture.png) + +[Link to the website](https://github.com/polifonia-project/ontology-network) \ No newline at end of file diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/ontology-network/website/README.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/ontology-network/website/README.md new file mode 100644 index 00000000..5c26929d --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/ontology-network/website/README.md @@ -0,0 +1,3 @@ +# Website + +This website is built using [Docusaurus 2](https://docusaurus.io/), a modern static website generator.
diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/ontology-network/website/blog/2023-04-10-welcome/index.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/ontology-network/website/blog/2023-04-10-welcome/index.md new file mode 100644 index 00000000..9b69c44c --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/ontology-network/website/blog/2023-04-10-welcome/index.md @@ -0,0 +1,10 @@ +--- +slug: welcome +title: Music Meta is out +authors: [jacopo, andrea] +tags: [musicmeta, hello, polifonia] +--- + +The first version of Music Meta is out + +![Overview of Music Meta](https://raw.githubusercontent.com/polifonia-project/music-meta-ontology/main/diagrams/artist.png) \ No newline at end of file diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/ontology-network/website/blog/2023-04-19-evaluation.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/ontology-network/website/blog/2023-04-19-evaluation.md new file mode 100644 index 00000000..2c6ce1a3 --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/ontology-network/website/blog/2023-04-19-evaluation.md @@ -0,0 +1,11 @@ +--- +slug: first-blog-post +title: PON v1.0 release +authors: [jacopo, andrea, nicolas, valentina] +tags: [hola, pon] +--- + +Coming soon! 
+ + + \ No newline at end of file diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/ontology-network/website/docs/bells/intro.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/ontology-network/website/docs/bells/intro.md new file mode 100644 index 00000000..901ebeb5 --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/ontology-network/website/docs/bells/intro.md @@ -0,0 +1,13 @@ +--- +sidebar_position: 1 +--- + +# Introduction + +[![Static Badge](https://img.shields.io/badge/pylode-online-purple?style=plastic)](pathname:///pylode/bells.html) + +The Bells Ontology opens the door to a fascinating world of bells, bell towers, and bell ringers. At the heart of this module lies a meticulous description of bells, capturing their essence through measurable, intrinsic aspects like weight, materials, and conservation status. + +When it comes to bells, their significance is not confined to mere objects; it extends to the rich history and culture they represent. The Bells Ontology enables the documentation of crucial historical data, such as authorship attribution and dating, providing a comprehensive understanding of these resonating treasures. Additionally, this module thoughtfully considers the techniques used in bell execution, recognizing the artistry and craftsmanship involved in creating their melodious tones. + +The Bells Ontology directly reuses the [ArCo ontology](http://wit.istc.cnr.it/arco/). Leveraging and expanding upon the existing ArCo modules, this ontology ensures **compatibility** and **interoperability**, making it a valuable asset for researchers and historians.
\ No newline at end of file diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/ontology-network/website/docs/cometa/intro.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/ontology-network/website/docs/cometa/intro.md new file mode 100644 index 00000000..bb2fe4f9 --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/ontology-network/website/docs/cometa/intro.md @@ -0,0 +1,28 @@ +--- +sidebar_position: 1 +--- + +# Introduction + +[![Static Badge](https://img.shields.io/badge/pylode-online-purple?style=plastic)](pathname:///pylode/cometa.html) + + +An extension of [Music Meta](https://github.com/polifonia-project/musicmeta-ontology) to describe the metadata of music **co**llections, **co**rpora, **co**ntainers, or simply music datasets! Here, metadata is described at the collection-level (data curator, annotations provided, availability of audio music, etc.), and at the content-level, (e.g., the title, artist, release of each piece in a dataset). The design of CoMeta is informed by a survey of Music Information Retrieval datasets, which allowed for the categorisation of common fields. + +The ontology is designed to describe music datasets as containers of music-related data with specific characteristics and annotations. + +- **Collection Information**: the ontology captures information about the dataset as a whole, including the number of records (compositions or performances), genres, year of release, collection metadata (project investigator, university, etc.), and content metadata (specification document with track-level information like title, artist, release, MusicBrainz identifier). This also includes properties such as music media type (audio or symbolic), duration, audio format (MP3, WAV, FLAC), symbolic format (MIDI, MusicXML, MEI), and other additional media (audio features, rankings, etc.). + +- **Annotations**: to represent the annotations provided within the dataset, which are crucial for MIR tasks.
It would include various types of annotations contributed by domain experts (musicologists, composition teachers) or listeners, covering aspects like music structure, key, chord progressions, emotions, listening habits, etc. + +- **Computational tasks**: to define the different tasks that a dataset enables based on the available annotations. Examples in MIR include music emotion recognition, pattern extraction, cadence detection, etc. Together with the [Music Algorithm](https://github.com/polifonia-project/music-algorithm-ontology) ontology (its sibling ontology) it also allows to track the performance/accuracy of computational methods tested on each dataset. + +- **Access and availability**: to capture information regarding the accessibility of the dataset, including whether it is open, on-demand, or closed, and whether it can be accessed online or requires manual provisioning. It may also include details about an API if available. + +- **License/Copyright**: to represent the licensing and copyright information associated with the dataset, ensuring compliance and proper attribution. + +- **References**: to provide links to official websites and academic manuscripts describing the dataset and its collection process, facilitating proper citation and reference. + + +By incorporating and supporting these requirements, the ontology would provide a structured representation of music datasets, their metadata, annotations, and interconnections. It would enable researchers and practitioners to explore, analyse, and utilize the datasets more effectively, promote interoperability, and facilitate the automatic discovery and extraction of knowledge from music-related data. 
+ diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/ontology-network/website/docs/diagrams_howto.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/ontology-network/website/docs/diagrams_howto.md new file mode 100644 index 00000000..80df2232 --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/ontology-network/website/docs/diagrams_howto.md @@ -0,0 +1,12 @@ +--- +sidebar_position: 2 +--- + +# Notation primer + +**Graffoo**, a Graphical Framework for OWL Ontologies, is an open source tool that can be used to present the classes, properties and restrictions within OWL ontologies, or sub-sections of them, as clear and easy-to-understand diagrams. In sum, yellow rounded boxes denote *classes*, blue and green arrows are *object* and *datatype properties* respectively, purple circles denote *individuals*, and green polygons are *datatypes*. + +For more info on the Graffoo notation, please find the illustration below from [this link](https://essepuntato.it/graffoo/). + +![Graffoo doc](https://essepuntato.it/assets/img/graffoo/legend.png) + diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/ontology-network/website/docs/intro.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/ontology-network/website/docs/intro.md new file mode 100644 index 00000000..0f6fec14 --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/ontology-network/website/docs/intro.md @@ -0,0 +1,8 @@ +--- +sidebar_position: 1 +--- + +# Welcome + +This is the entry point of PON documentation, where you will find tutorials, +examples and bla bla bla. 
\ No newline at end of file diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/ontology-network/website/docs/music-algorithm/intro.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/ontology-network/website/docs/music-algorithm/intro.md new file mode 100644 index 00000000..8d211d0b --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/ontology-network/website/docs/music-algorithm/intro.md @@ -0,0 +1,109 @@ +--- +sidebar_position: 1 +--- + +# Introduction + +[![Static Badge](https://img.shields.io/badge/pylode-algo-purple?style=plastic)](pathname:///pylode/algorithm-odp.html) +[![Static Badge](https://img.shields.io/badge/pylode-ma-purple?style=plastic)](pathname:///pylode/music-algorithm.html) + + +The Music Algorithm module formalises algorithms that can operate on music metadata (using the [``Music Meta``](https://github.com/polifonia-project/music-meta-ontology) module), and musical content (via the [``Music Representation``](https://github.com/polifonia-project/music-representation-ontology) module). The module commitments are similar to those defined by Diamantini et al. in [1]. Indeed, an algorithm is characterised by three main components: a formalisation, which can be theoretical (e.g. pseudocode) or executable (e.g. using a programming language); a parametrisation (e.g. input data); and the kind of task it solves. The latter defines a set of entities that are processed alongside the input and output data requirements and the final goal achieved. The module allows theoretical and quantitative performances to be represented in the context of the algorithm’s parametrisation. + + +Through an abstract and general definition, the formalisation in Music Algorithm can be seen as a general pattern, capable of representing any algorithm regardless of the domain of application. In the context of music, the output of the algorithm is considered an analysis, which is then represented via the Representation module. 
+ + +## Examples + +Expressing an audio-based music similarity. + +``` +PREFIX mm: +PREFIX mr: +PREFIX ma: + +ma:MFCCAudioFeatures rdfs:subClassOf ma:ComputationalAnalysis . + +A a mm:MusicEntity . +A mr:hasAnnotation [ + :hasAnnotationContent [ + :hasSubject AudioFeaturesOfA +] . + +AudioFeaturesOfA a ma:MFCCAudioFeatures . +LibrosaMFCCExtractor a ma:ExecutableFormalisation ; + ma:outputs AudioFeaturesOfA . + +B a mm:MusicEntity . +B mr:hasAnnotation [ + :hasAnnotationContent [ + :hasSubject AudioFeaturesOfB +] . + +AudioFeaturesOfB a ma:MFCCAudioFeatures . +LibrosaMFCCExtractor a ma:ExecutableFormalisation ; + ma:outputs AudioFeaturesOfB . + +ma:AudioSignalSimilarityTask a ma:Task ; + ma:requiresInput ma:AudioSignal ; + ma:producesOutput xsd:float ; + ma:accomplishes ma:MusicSimilarity ; + ma:isOfTaskType ma:Similarity ; + ma:hasProcess [ + ma:isOfProcessingType ma:SignalComparison ; + ma:involves [ + ma:refersTo ma:AudioSignal ; + ma:hasRole ma:pairwise_similarity_source ; + ] ; + ma:involves [ + ma:refersTo ma:AudioSignal ; + ma:hasRole ma:pairwise_similarity_target ; + ] + ] + +Librosa_MFCC_SSM_Algorithm a ma:Algorithm ; + ma:performsTask ma:SSM_MFCC_SimilarityTask ; + ma:hasParametrisation [ + ma:usesData [ + ma:involves ma:MFCCAudioFeatures ; + ma:hasRole ma:pairwise_similarity_source + ] ; + ma:usesData [ + ma:involves ma:MFCCAudioFeatures ; + ma:hasRole ma:pairwise_similarity_source + ] ; + ] . + + +SimAtoB a ma:ComputationalAnalysis ; + mr:hasAnnotation [ + mr:hasAnnotator Librosa_MFCC_SSM_Algorithm ; + mr:hasAnnotationType ma:Similarity ; + mr:hasAnnotationContent [ mr:hasSubject 0.8^^xsd:float ] . 
; + ] ; + +A mr:hasAnnotation [ + mr:derivesFrom SimAtoB ; + mr:hasAnnotator Librosa_MFCC_SSM_Algorithm ; + mr:hasAnnotationType ma:pairwise_similarity_source ; + mr:describes [ + mr:hasTemporalLocation [ # Not sure if the predicates are this ones - should check but the idea is that the temporal location is the whole piece + mr:start 0 + mr:end 100 + ] + ] ; +] + +B mr:hasAnnotation [ + mr:derivesFrom SimAtoB ; + mr:hasAnnotator Librosa_MFCC_SSM_Algorithm ; + mr:hasAnnotationType ma:pairwise_similarity_target ; + mr:describes [ + mr:hasTemporalLocation [ # Not sure if the predicates are this ones - should check but the idea is that the temporal location is the whole piece + mr:start 0 + mr:end 100 + ] + ] ; + ]. +``` diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/ontology-network/website/docs/music-instrument/intro.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/ontology-network/website/docs/music-instrument/intro.md new file mode 100644 index 00000000..37a6c41f --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/ontology-network/website/docs/music-instrument/intro.md @@ -0,0 +1,18 @@ +--- +sidebar_position: 1 +--- + +# Introduction + +[![Static Badge](https://img.shields.io/badge/pylode-mi-purple?style=plastic)](pathname:///pylode/music-instrument.html) + +The Instrument Module describes musical instruments as mediums of performance and their technical properties. Given that numerous taxonomies of instruments into groups and families exist (e.g. Hornbostel-Sachs, MIMO, Mu- sicBrainz) and finding common categorisations is an open problem [1], our module provides an abstraction capable to express arbitrary classifications. This is achieved by leveraging the Information-Realisation and the Collection ODPs. Overall, the module allows to: (i) refer to instruments as entities (an instrumen- tation of a piece for “piano” and “viola”) as well as conceptually (e.g. 
a viola has 4 strings); (ii) support the integration with different taxonomies and vocabularies, such as [2]; (iii) describe the evolution of instruments in time and space (e.g. a viola as a cultural heritage object being relocated). This provides a foundational level where contributors can “plug” their instrument-specific ontologies [3]. + + +## References + +[1] Kolozali, S., Barthet, M., Fazekas, G., Sandler, M.B.: Knowledge representation issues in musical instrument ontology design. In: ISMIR. pp. 465–470 (2011) + +[2] Lisena, P., Todorov, K., Cecconi, C., Leresche, F., Canno, I., Puyrenier, F., Voisin, M., Le Meur, T., Troncy, R.: Controlled vocabularies for music metadata. In: ISMIR: International Society for Music Information Retrieval (2018) + +[3] Zanoni, M., Setragno, F., Sarti, A., et al.: The violin ontology. In: Proc. of the 9th Conference on Interdisciplinary Musicology (CIM14). Citeseer (2014) \ No newline at end of file diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/ontology-network/website/docs/music-meta/advanced/extra-a.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/ontology-network/website/docs/music-meta/advanced/extra-a.md new file mode 100644 index 00000000..b27441dc --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/ontology-network/website/docs/music-meta/advanced/extra-a.md @@ -0,0 +1,13 @@ +--- +sidebar_position: 1 +--- + +# Overview of pyMusicMeta + +To facilitate the reuse of Music Meta and its data conversion into OWL/RDF Knowledge Graph, we developed a library to map arbitrary music metadata into RDF triples. +This enables a practical and scalable workflow for data lifting to create Music Knowledge Graphs without expert knowledge of our ontological model. +The library is developed in Python as an extension of [RDF-Lib](https://rdflib.readthedocs.io/en/stable/).
+ +The Music Meta library allows for the creation of RDF triples from textual data, offering the advantage of easy data generation using our model. +The library provides a range of simple methods for adding triples to a graph, using clear, concise documentation and straightforward naming conventions. +With each triple added, the library automatically adds alignments to other schema that Music Meta supports, thus bringing interoperability with ontologies. \ No newline at end of file diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/ontology-network/website/docs/music-meta/advanced/extra-b.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/ontology-network/website/docs/music-meta/advanced/extra-b.md new file mode 100644 index 00000000..880dbb30 --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/ontology-network/website/docs/music-meta/advanced/extra-b.md @@ -0,0 +1,13 @@ +--- +sidebar_position: 2 +--- + +# Contributing to Music Meta + +## Extending the ontology + +An example here is CoMeta + +## Specialising the ontology + +An example is the Tunes ontology \ No newline at end of file diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/ontology-network/website/docs/music-meta/getting-started/congratulations.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/ontology-network/website/docs/music-meta/getting-started/congratulations.md new file mode 100644 index 00000000..424381da --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/ontology-network/website/docs/music-meta/getting-started/congratulations.md @@ -0,0 +1,19 @@ +--- +sidebar_position: 6 +--- + +# Congratulations! + +You have just learned the **basics of the Music Meta Ontology**. Here is a full example of *Let it be* to wrap up. 
+ +![Example banner](https://github.com/polifonia-project/music-meta-ontology/blob/main/examples/beatles_example.png?raw=true) + +Our ecosystem has **much more to offer**! Check out the tutorial on the `pyMusicMeta` in the next section and start creating Music Knowledge Graphs with Music Meta. + + +Anything **unclear** or **buggy** in this tutorial? [Please report it!](https://github.com/polifonia-project/music-meta-ontology/issues) + +## What's next? + +- Find the full documentation of Music Meta at [this link](pathname:///pylode/music-meta.html) +- Learn how to use [`pyMusicMeta`](/docs/category/advanced-guide) diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/ontology-network/website/docs/music-meta/getting-started/create-an-artist.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/ontology-network/website/docs/music-meta/getting-started/create-an-artist.md new file mode 100644 index 00000000..8532bc89 --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/ontology-network/website/docs/music-meta/getting-started/create-an-artist.md @@ -0,0 +1,15 @@ +--- +sidebar_position: 2 +--- + +# Create an Artist + +To represent music creatives the class `mm:MusicArtist` generalises over musicians (`mm:Musician`), ensembles (`mm:MusicEnsemble`), and computational methods (`mm:MusicAlgorithm`), as illustrated in the diagram below. +Musicians are seen as a specialisation of persons who can optionally be associated to a medium of performance (e.g. voice, guitar), and be part of a music ensemble (e.g. `MusicGroup`, `Orchestra`, `Choir`). +Depending on the data available, the latter can be expressed either through a membership relationship (`core:isMemberOf`), a specialisation of the former, such as `mm:isSingerOf`, or through a `mm:MusicEnsembleMembership` when the period of participation of the musician is available. 
+ +![Example banner](https://raw.githubusercontent.com/polifonia-project/music-meta-ontology/main/diagrams/artist.png) + +All music artists can be associated to (one or more) `mm:MusicGenre`(s), express influences or collaborations, and share a period of activity. +Here, the start date refers to the foundation for music ensembles, whereas the end date is used for discontinued projects for algorithms. + diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/ontology-network/website/docs/music-meta/getting-started/create-music.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/ontology-network/website/docs/music-meta/getting-started/create-music.md new file mode 100644 index 00000000..b4e5a5ca --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/ontology-network/website/docs/music-meta/getting-started/create-music.md @@ -0,0 +1,31 @@ +--- +sidebar_position: 2 +--- + +# Create a Music Entity + +The focal point of Music Meta is the `mm:MusicEntity` class (figure below). +This class represents an Information Object, which is defined as the sum of all the elements that make up a piece of music. +A Music Entity is composed of several components, including lyrics (generalised through `mm:Text` to also account for `mm:Libretto`), the musical composition (`mm:CompositionObject`) and its instrumentation (`mm:Instrumentation`). + +![Example banner](https://github.com/polifonia-project/music-meta-ontology/blob/main/diagrams/creation.png?raw=true) + +A `mm:CompositionObject` describes the form of the composition (`mm:FormType`), its constituents parts (e.g. `mm:Movement` or `mm:Section`), and its key (`mm:Key`). +In addition, its datatype properties describe the tempo of the composition (`mm:tempo`) and its order (`mm:orderNumber`). +A `mm:Instrumentation` can instead be formalised in a `mm:Score`, which can be either digital or paper. 
+Through the score, the instrumentation describes one or more `mm:MediumOfPerformance`, each of which has a cardinality (e.g. 3 violins). + +![Example banner](https://github.com/polifonia-project/music-meta-ontology/blob/main/diagrams/entity.png?raw=true) + +## Relationships and grouping +It is also possible to describe relationships between different Music Entities, defined by the part (`mm:hasPart`) and the derivation (`mm:isDerivedFrom`) properties. +Derivations are used at the user's discretion, based on the dominance of concept [2] (which criteria attribute proper identity to a musical entity) and can be of different types: revision, transposition, cover, reconstruction, reduction, etc. +This makes it possible to describe different types of compositions, rearrangements and modifications of an original piece, as well as influences and more complex types of derivations. +For example, the production of a cover song (e.g. in a different musical genre) may keep the lyrics and introduce a new composition and instrumentation, hence resulting in a new `mm:MusicEntity`. +In addition, Music Entities can be organised in `mm:Collection`, according to a `mm:CollectionConcept` that binds them together. + +In sum, the model provides flexibility across periods and genres as the proposed classes allow generalisations to be made about the text, the musical composition and its arrangement. +Through the specialisation of classes, depending on the target domain/application, specificity can easily be achieved. +For example, a tune family can be seen as a `mm:Collection` encompassing several tunes (as music entities) based on specific criteria (e.g. similarity, provenance). 
+ +[2] DOREMUS \ No newline at end of file diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/ontology-network/website/docs/music-meta/getting-started/links_provenance.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/ontology-network/website/docs/music-meta/getting-started/links_provenance.md new file mode 100644 index 00000000..7256dc16 --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/ontology-network/website/docs/music-meta/getting-started/links_provenance.md @@ -0,0 +1,16 @@ +--- +sidebar_position: 4 +--- + +# Links and provenance + +We propose a pattern based on `RDF*` [4] to describe the provenance at different levels (figure below). +The use of RDF* is particularly useful for this purpose, as it allows to embed provenance information to every triple in the dataset. +This simplifies and streamlines the model, eliminating the need for n-ary relations or reification for each triple. + +![Example banner](https://github.com/polifonia-project/music-meta-ontology/blob/main/diagrams/provenance.png?raw=true) + +The proposed pattern is straightforward and comprises the class `core:Reference`, which describes the source of the reference (using the class `core:Source`) and the method used to obtain the annotation (using the class `core:SourceMethod`). +Additionally, the datatype properties `core:confidence` and `core:retrievedOn` describe the confidence of the annotation and the date it was produced, respectively. 
+ +[4] TODO \ No newline at end of file diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/ontology-network/website/docs/music-meta/getting-started/music-performance.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/ontology-network/website/docs/music-meta/getting-started/music-performance.md new file mode 100644 index 00000000..698d05ee --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/ontology-network/website/docs/music-meta/getting-started/music-performance.md @@ -0,0 +1,23 @@ +--- +sidebar_position: 3 +--- + +# Create a Performance + +The realisation of a `mm:MusicEntity` is exemplified by `mm:MusicalPerformance`, describing a performance that can be either live (`mm:LivePerformance`) or in a studio (`mm:StudioPerformance`). +As illustrated in the figure below, the place and time interval of a performance are described by `core:Place` and `core:TimeInterval` -- involving one or more `mm:MusicArtist`s (optionally with a specific role). +In turn, a performance may create a new `mm:MusicEntity` if the execution differs significantly from the original version. + +![Example banner](https://github.com/polifonia-project/music-meta-ontology/blob/main/diagrams/performance.png?raw=true) + +A Music Entity can also be recorded by means of a `mm:RecordingProcess`, which is a subclass of `mm:CreativeProcess` that allows for specifying location, time interval and persons involved in recording the song. +This makes it possible to describe information about both the production (e.g., producers) and the technical aspects of it (e.g., sound engineer, equipment used). +The recording process produces a `mm:Recording`, which is contained in a `mm:Release`. + +Information about the broadcasting of a recording is modelled through the `mm:BroadcastingSituation` class (an instance of the Situation ODP [3]), which describes when and where the song was broadcast, and by which broadcaster (`mm:Broadcaster`).
+ +## Publication and licensing +The `mm:PublicationSituation` class describes information about the publication of a release, which is common to the publication of a `mm:Score` (see figure above). +For both a release and a score, it describes when and where they were published, and by a `mm:Publisher`. + +Licence information is described by the `mm:License` class, which applies to records, releases and scores. \ No newline at end of file diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/ontology-network/website/docs/music-meta/intro.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/ontology-network/website/docs/music-meta/intro.md new file mode 100644 index 00000000..b4b38782 --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/ontology-network/website/docs/music-meta/intro.md @@ -0,0 +1,38 @@ +--- +sidebar_position: 1 +--- + +# Introduction + +[![Static Badge](https://img.shields.io/badge/pylode-online-purple?style=plastic)](pathname:///pylode/music-meta.html) + +A music analyst, a computational musicologist, a music librarian, and a data engineer are working on a joint project. +They need to contribute data from various musical sources, ranging from music libraries, annotated corpora and tune books, to audiovisual archives, radio broadcasts, and music catalogues. +All data is eventually merged/aggregated as interconnected corpora, and linked to online music databases (e.g. MusicBrainz, Discogs) and knowledge bases (e.g. Wikidata). +This creates opportunities to link cultural heritage artefacts to music industry data (streaming services, music professionals, etc.) and vice versa. + +This plot subsumes a recurring challenge for musical heritage projects like Polifonia. +Besides the individual requirements of each stakeholder - possibly rooted in different music genres, periods and datasets, a fundamental requirement is the **interoperability** of music metadata. + + +## What is so special about metadata?
+Music metadata (alias bibliographic, or documentary music data) is used to consistently identify and describe musical works, their artists, recordings, and performances. +For the music industry, it allows for efficient management and distribution of music, which facilitates search and recommendation. +When metadata is accurate, it ensures that artists receive proper credit and compensation. +For musical heritage, metadata allows for the preservation and dissemination of musical works and traditions, but also aids in the research and study of music history and culture. +When integrating both views, metadata can help to promote diversity and inclusivity in the music industry by highlighting lesser-known genres and artists, while integrating information and artefacts of cultural interest. + + +## Why another music ontology? + +*What about reusing existing ontologies, like the Music Ontology (MO) and the DOREMUS ontology?* +Modelling music metadata across different genres and historical periods, to accommodate various use cases over heterogeneous data sources poses a number of challenges. +First of all, it requires a perspective that harmonises all requirements from different stakeholders - to design a model that can be tailored to different data sources rather than to a single type of dataset. + +- **Challenge 1: Domain specificity hampers interoperability**. MO leans towards modelling discographic data with a focus on contemporary music, whereas DOREMUS is inherently rooted in classical music. Nevertheless, when drifting from discographic data and classical music, or attempting to reuse both models, addressing e.g. cultural heritage requirements while fostering interoperability becomes difficult. +- **Challenge 2: Expressivity is needed at different levels**. Another requirement for interoperability and reuse across various data sources is providing expressivity at different degrees, i.e.
the possibility to conveniently describe music metadata at the right level of detail. One data source may have granular/detailed information that requires high semantic expressivity (a composition process spread over different times, places, and involving more artists); whereas others may have basic or incomplete data (only the name of an artist is known). +- **Challenge 3: Provenance is fundamental for data integration**. Accounting for provenance is a central requirement for both cultural heritage and music industry. This becomes fundamental when integrating Knowledge Graphs from different datasets and stakeholders - as every single bit of data (each triple) should be attributable to a dataset/KG. Provenance is also needed within the context of a single dataset, at least for claims and links. + +> For more info, please check out our GitHub repository and the upcoming manuscript. + +![Overview of Music Meta](https://raw.githubusercontent.com/polifonia-project/music-meta-ontology/main/diagrams/music_meta.png) \ No newline at end of file diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/ontology-network/website/docs/music-projection/intro.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/ontology-network/website/docs/music-projection/intro.md new file mode 100644 index 00000000..8d68bf73 --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/ontology-network/website/docs/music-projection/intro.md @@ -0,0 +1,19 @@ +--- +sidebar_position: 1 +--- + +# Introduction + +[![Static Badge](https://img.shields.io/badge/pylode-online-purple?style=plastic)](pathname:///pylode/music-projection.html) + +Describing the content of a musical composition is a very difficult task. Providing the definition of a musical concept, when possible, requires a meticulousness that is very hard to achieve even for experts of that field. For instance, a *chord* is a musical concept that is generally well known to experts as well as novices.
Despite its popularity, an unambiguous definition for *chord* is still disputed and ongoing research is focused on finding such a definition. +The issue however is not on the concept itself, a *chord*, but on its inherent ambiguity. +Music evolves hand in hand with humans and the definition of musical concepts drifts at the same pace. It is inherently impossible to find a uniquely and objectively correct definition of *chord* since it is deeply rooted in the context in which such *chord* will be used in the first place. + +While currently we might refer to a *chord* as a stack of notes played at the same time, the same definition does not apply when one wants to study music from the Renaissance. The term *chord* might refer to the same conceptual idea, but the semantics of the term changes depending on the context. +The inverse can also be true. When talking about rhythm in electronic music, we might refer to a rhythmic figure as *four-on-the-floor*. It is unlikely to use such a term when describing a rhythmic figure from Baroque music, even though they are perceptually the same exact rhythm. + +The underlying intuition is that musical concepts are strictly related to human perception and are thus uniquely identifiable and classifiable. Depending on the context within which they are used, their syntactic and semantic definition can change. We refer to this phenomenon as a projection of a musical entity. +The Music Projection Ontology allows the definition of **foundational music concepts** that are recognised as stable concepts in history by musicologists. +The aim of the Music Projection Ontology is hence to provide a suitable vocabulary that can be used by experts to formulate their own definition of a musical concept. By relying on a common vocabulary, it is possible to ultimately analyse a musical composition by means of different projections of the same concept.
+Relying on the Music Projection Ontology enables experts to seamlessly **integrate** together different theories and explore combinations of those theories that would otherwise require a deep understanding of a particular domain. \ No newline at end of file diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/ontology-network/website/docs/music-representation/intro.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/ontology-network/website/docs/music-representation/intro.md new file mode 100644 index 00000000..4734b220 --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/ontology-network/website/docs/music-representation/intro.md @@ -0,0 +1,15 @@ +--- +sidebar_position: 1 +--- + +# Introduction + +[![Static Badge](https://img.shields.io/badge/pylode-online-purple?style=plastic)](pathname:///pylode/music-representation.html) + +The music domain landscape is populated by a large number of representation systems that serve different purposes. Trying to learn a new tune on the guitar? Chances are you will rely on [tabs](https://en.wikipedia.org/wiki/ASCII_tab). Are you working with a digital score? [MusicXML](https://www.musicxml.com/) is the industry standard. Your digital score is using some unconventional notation? Then [MEI](https://music-encoding.org/) is what you are looking for. And if you just need a simple way of notating a musical piece, [Lead sheet](https://en.wikipedia.org/wiki/Lead_sheet) can help, or perhaps [ABC](https://abcmusicnotation.weebly.com/) notation. Writing chords in natural language can be very confusing though. [Harte notation](https://ismir2005.ismir.net/proceedings/1080.pdf) can really rescue you, and if you are theoretically-inclined, using [Roman Numerals](https://en.wikipedia.org/wiki/Roman_numeral_analysis) can bring a whole new level of knowledge to your notation. +Such a large pool of options is great for written music; it really fits many different needs.
But music is not only in written form, it is mostly played by real humans. If you are trying to notate a live performance there is no need to worry, [MIDI](https://www.midi.org/) can cover that use case for you! + +It truly is a vastly populated landscape and every different notation format has countless advantages and drawbacks. Being able to exploit the best part of each method is extremely valuable, but it is very difficult to convert one format to a different one. Some information might be lost in that process, with the risk that it will be lost forever. + +Despite the apparent differences between two formats, their goal is eventually the same: notate music. When a piece of information on a musical piece is identified, such information should be universal. Different formats might ignore some particular information while others might describe it very thoroughly, but the information is inherent to the musical piece in the first place. +The Music Representation Ontology tackles this problem by providing a simple, general and extremely flexible model that allows universal music information to be represented in a coherent form. Each different notation format can benefit from information carried by other formats. **Interoperability** is a key commitment of the Music Representation Ontology, and this is solved by focusing on **representing musical knowledge** while retaining the flexibility and efficacy of the whole notation landscape.
diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/ontology-network/website/src/pages/markdown-page.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/ontology-network/website/src/pages/markdown-page.md new file mode 100644 index 00000000..9756c5b6 --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/ontology-network/website/src/pages/markdown-page.md @@ -0,0 +1,7 @@ +--- +title: Markdown page example +--- + +# Markdown page example + +You don't need React to write simple standalone pages. diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/patterns-knowledge-graph/P2KG-Pipeline/readme.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/patterns-knowledge-graph/P2KG-Pipeline/readme.md new file mode 100644 index 00000000..f43c9a65 --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/patterns-knowledge-graph/P2KG-Pipeline/readme.md @@ -0,0 +1,99 @@ +--- +component-id: P2KG-Pipeline +name: P2KG - Pattern to Knowledge Graph JAMS Pipeline +description: This code takes the patterns generated by the FoNN tool in the form of pickle file and then creates knowledge graph of all the patterns found +type: Software +release-date: 01/12/2022 +release-number: v0.1.0.1-dev +work-package: +- WP3 +project: polifonia-project +demo: https://projects.dharc.unibo.it/melody/fonn/statistics_on_the_session_annotated_subset_and_meertens_tune_collections_mtcann_pattern_kg +licence: CC BY 4.0, https://creativecommons.org/licenses/by/4.0/ +links: +- https://zenodo.org/record/ TODO +related-components: +- sparql-anything +- https://github.com/smashub/choco +contributors: +- Danny Diamond +- Abdul Shahid +- James McDermott +credits: "This project has received funding from the European Union’s Horizon 2020 research and innovation programme under grant agreement N. 
101004746" +--- + +# P2KG - Patterns to Knowledge Graph JAMS Pipeline +- Targeting the goals of Polifonia WP3 package, P2KG JAMS Pipeline creates the knowledge graph of the patterns generated by [FoNN](https://github.com/polifonia-project/folk_ngram_analysis). The details of directories and files are given below: +- **Directories** + - ``config`` + - ``config.yml`` => this file contains configurations related to pickle file location and JAMS annotations such as ``music_pattern_directory`` and ``corpus_name`` etc. You need to update these settings before executing the pipeline + - ``jams2rdf_config.yml`` => this file contains JAMS to RDF process settings, for example, ``rdf_directory`` instructs the code where to create the rdf files. + - ``music_patterns_pickle`` + - ``thesession_27_04_23_kg_data.pkl`` => this file contains pattern information in each tune. This file must follow the following structure + - ``id_number`` => contains the tune id + - ``tune_title`` => contains the tune title information + - ``tune_family`` => tune family information + - ``feature`` => such as "diatonic pitch class" + - ``level`` => information such as "accent" or "note" level + - ``n_vals`` => n-gram information such as "(4, 5, 6)" + - ``duration_beats`` => the tune duration length, e.g. 34 + - ``locations`` => pattern information should follow this structure, a dictionary of patterns as tuples and their locations in a list, e.g., "{(3, 4, 3, 2): [0, 8], (4, 3, 2, 3): [1, 9]}" + - ``feature_sequence_data``=> feature sequence data, list of pitch class values, [3, 4, 3, ....
] + - ``thesession_metadata.csv`` => This file contains metadata of each tune, it should follow the following structure + - ``X`` => contains the tune id + - ``title``=> contains the tune title information + - ``Z`` => transcriber of the tune + - ``R`` => tunetype information + - ``M`` => timesignature information + - ``K`` => key of the tune + - ``score`` => content of the tune in the form of ABC notation + - ``Formatted_title`` => clean tune title's information + - ``schemas`` + - ``pattern_fonn.json`` => this is a JAMS schema file, it is required for creating proper JAMS file. For each tune (in the pickle file) a corresponding JAMS file will be generated. + - ``sparql_anything`` + - ``jams_ontology_pattern.sparql`` => this is the query the SPARQL Anything Engine require to create an RDF file for a given JAMS file. + - ``sa.jar`` => this is SPARQL Anything engine, you can download the latest release from [SPARQL-Anything GitHub link](https://github.com/SPARQL-Anything/sparql.anything/releases/tag/v0.8.0). + - ``tests`` => this folder contain test cases (TODO- to be developed) + - ``JAMS``=> .jams file will be created in this folder for a corpus, for example, all The Session corpus files will go inside "thesession" folder + - ``RDF`` => All RDF files, .ttl files will be created inside this folder. for example, all The Session corpus files will go inside "thesession" folder + +NOTE: Apart from these folders, you will find the following important .py files + + ``pattern2kg_pipeline.py`` => this is the starting file, you can start process by executing this file. It will first create all the jams files and then it will create rdf files + + ``pattern2kg_pipeline_parallel.py`` => this file can be used if you want to execute the whole process in parallel (parallelized version of the above file). However, most of the time you will not need to run this file. 
+ + ``pickle2jams.py``=> this file is responsible for creating .jams files in the JAMS folder + + ``jams2rdf.py`` => this file is responsible for creating .ttl files in the RDF folder + +## P2KG JAMS Pipeline: +1. **P2KG - General Steps** + * 1.1. Input: Patterns generated using FoNN in the form of pickle file. + * 1.2. Process: [JAMS Annotation] (https://jams.readthedocs.io/en/stable/) - JAMS files are created using custom pattern schema, you can find in the ``schema`` folder. + * 1.3. Process: [SPARQL Anything Engine] (https://sparql-anything.cc/) - It takes JAMS file and create KG. + * 1.4. Output: Knowledge Graph of pattern is created, a demo of created KG can be accessed on [polifonia server] (https://polifonia.disi.unibo.it/fonn/sparql). + +## P2KG - Requirements + +To ensure P2KG runs correctly, please install the following libraries: + +``` pip install -r requirements.txt ``` + +## Attribution + +If you use the code in this repository, please cite this software as follow: +``` +@software{patterns2Kg_jams_pipeline_2022, + address = {Galway, Ireland}, + title = {% raw %}{{{% endraw %}P2KG} - {P}attern-2-{KG} {Knowledge Graph}}, + shorttitle = {% raw %}{{{% endraw %}P2KG}}, + url = {https://github.com/polifonia-project/P2KG}, + publisher = {National University of Ireland, Galway}, + author = {Shahid Abdul, Diamond Danny, McDermott, James and Pushkar Jajoria}, + year = {2022}, +} +``` + +## License +This work is licensed under CC BY 4.0, https://creativecommons.org/licenses/by/4.0/ diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/patterns-knowledge-graph/README.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/patterns-knowledge-graph/README.md new file mode 100644 index 00000000..2cbdc883 --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/patterns-knowledge-graph/README.md @@ -0,0 +1,99 @@ +--- +container-id: patterns-knowledge-graph +name: Patterns Knowledge Graph +description: Knowledge graph 
containing data about patterns extracted using the [FONN tools](https://github.com/polifonia-project/folk_ngram_analysis), and software for creating that knowledge graph +type: Project +release-date: 07/06/2023 +release-number: v0.1.0 +work-package: +- WP3 +project: polifonia-project +links: +- https://github.com/polifonia-project/patterns-knowledge-graph +- https://zenodo.org/record/ (TODO) +credits: +- https://github.com/ashahidkhattak +- https://github.com/jmmcd +- https://github.com/danDiamo +funder: + - name: Horizon 2020 Framework Programme + url: https://cordis.europa.eu/programme/id/H2020-EC + grant-agreement: "https://cordis.europa.eu/project/id/101004746" +credits: "This project has received funding from the European Union’s Horizon 2020 research and innovation programme under grant agreement N. 101004746." +has-part: + - P2KG-Pipeline + - patterns-knowledge-graph-datasets +--- + +# Abstract and highlights +The Patterns Knowledge Graph contains data about patterns in folk / traditional music. The patterns are those extracted using the [FONN tools](https://github.com/polifonia-project/folk_ngram_analysis). + +* Allows exploration of patterns as linked open data +* Multiple corpora, multiple definitions of patterns (e.g. $n$=(4, 5, 6), accent-level versus note-level) +* Running public SPARQL endpoint and MELODY data stories as demonstrations. + +## Data Sources +The PatternKG currently contains pattern data extracted from two important repositories: + +* MTC-ANN, Meertens Tune Collections - Annotated. This set of 360 tunes from the Dutch folk tradition contains tune family annotations. See https://www.liederenbank.nl/mtc/ +* The Session (annotated subset). This set of 315 tunes in 10 tune families is from the Irish folk tradition. The Session itself is 40k tunes, crowd-sourced. This subset has been chosen and annotated with tune family information based on the musicological literature. See https://thesession.org/ for the original (large) collection. 
+ +The tune family annotation of *The Session* (315 tunes) is published here for the first time. It was carried out by Danny Diamond based on the musicological literature with extensive manual checking. + + +## Requirements and installation + +1. Use `pip install -r requirements.txt` to install the necessary libraries. They are: + +``` +PyYAML +rdflib +jams +``` + +2. You will also need to install SPARQL-Anything. We are currently using version 0.6.0 (see https://github.com/polifonia-project/patterns-knowledge-graph/issues/1). Download the 0.6.0 jar file from: https://github.com/SPARQL-Anything/sparql.anything/releases, and copy it to the `sparql-anything` directory here in the `patterns-knowledge-graph` repo. + +## Running the software + +Then, you can run the software by running: + +``` +cd P2KG-Pipeline +python pattern2kg_pipeline.py +``` + +It will read the data (metadata in csv format, and pattern occurrences in pickle format) from `inputs/`. It will write out the knowledge graph to the `RDF` directory in `.ttl` files. + +We then copy this to the Polifonia server https://polifonia.disi.unibo.it/fonn/sparql, which provides the data via a public SPARQL endpoint. We also provide some data stories via MELODY: + +* https://projects.dharc.unibo.it/melody/fonn/statistics_on_the_session_annotated_subset_and_meertens_tune_collections_mtcann_pattern_kg +* https://projects.dharc.unibo.it/melody/fonn/tune_families_in_the_session_and_mtcann + +Anyone can create new MELODY data stories, using the same SPARQL endpoint. See https://projects.dharc.unibo.it/melody/ for more information. + +## Bringing your own data + +If you have pattern information and metadata representing some new corpus, you can copy it to a subdirectory in `inputs/` before starting, and it will be processed. The metadata should be in csv format, with at least an `identifiers` field (string), a `title` field (string; can be empty), and a `tune family` field (string). 
+ + +## Citing this repository + +If you would like to use this software and knowledge graph, please cite this repository as follows: + +``` +@software{shahid_patternkg_2023, + address = {Galway, Ireland}, + title = {Pattern Knowledge Graph}, + shorttitle = {% raw %}{{{% endraw %}PatternKG}}, + url = {https://github.com/polifonia-project/patterns-knowledge-graph}, + publisher = {University of Galway}, + author = {Shahid, Abdul and Diamond, Danny and McDermott, James}, + year = {2023}, +} +``` + + + +[![DOI](https://zenodo.org/badge/DOI/10.5281/zenodo.8034504.svg)](https://doi.org/10.5281/zenodo.8034504) + +This Pattern KG and associated software in this repository form part of Polifonia Deliverable D3.5, part of WP3. diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/patterns-knowledge-graph/patterns-knowledge-graph-datasets/RDF/README.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/patterns-knowledge-graph/patterns-knowledge-graph-datasets/RDF/README.md new file mode 100644 index 00000000..27175eb3 --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/patterns-knowledge-graph/patterns-knowledge-graph-datasets/RDF/README.md @@ -0,0 +1,26 @@ +--- +component-id: patterns-knowledge-graph-datasets +name: Patterns Knowledge Graph - Output datasets +description: Knowledge graph containing data about patterns extracted using the [FONN tools](https://github.com/polifonia-project/folk_ngram_analysis) +type: Project +release-date: 07/06/2023 +release-number: v0.1.0 +work-package: +- WP3 +project: polifonia-project +links: +- https://github.com/polifonia-project/patterns-knowledge-graph +- https://zenodo.org/record/ (TODO) +contributors: +- https://github.com/ashahidkhattak +- https://github.com/jmmcd +- https://github.com/danDiamo +funder: + - name: Horizon 2020 Framework Programme + url: https://cordis.europa.eu/programme/id/H2020-EC + grant-agreement: "https://cordis.europa.eu/project/id/101004746" +credits: "This 
project has received funding from the European Union’s Horizon 2020 research and innovation programme under grant agreement N. 101004746." +--- + +The Patterns Knowledge Graph contains data about patterns in folk / traditional music. The patterns are those extracted using the [FONN tools](https://github.com/polifonia-project/folk_ngram_analysis). + diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/pitchcontext/README.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/pitchcontext/README.md new file mode 100644 index 00000000..6d8d8368 --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/pitchcontext/README.md @@ -0,0 +1,56 @@ +--- +component-id: pitchcontext +name: pitchcontext +description: Python module for melody analysis based on pitch context vectors. +type: SoftwareLibrary +release-date: 2023-06-02 +release-number: 0.1.9 +work-package: +- WP3 +pilot: +- TUNES +keywords: + - melody + - pitch analysis +changelog: +licence: +release link: +--- + +[![DOI](https://zenodo.org/badge/DOI/10.5281/zenodo.8020644.svg)](https://doi.org/10.5281/zenodo.8020644) + +# pitchcontext +Python module for melody analysis based on pitch context vectors. + +## Prerequisites: +- lilypond installed and in command line path. +- convert (ImageMagick) installed and in command line path. +- kernfiles and corresponding .json files with melodic features. + +The .json files need to be formatted according to the standard of [MTCFeatures](https://pvankranenburg.github.io/MTCFeatures/melodyrepresentation.html). + +## Installation +The latest release of the pitchcontext module can be installed from pypi: +``` +$ pip install pitchcontext +``` + +The development version can be installed by cloning the repository and by using the provided pyproject.toml and poetry. In root of the rep do: +``` +$ poetry install +``` +This creates a virtual environment with pitchcontext installed. 
+ +## Examples +Requires a Python3 environment with both pitchcontext and streamlit installed. +Four examples are provided: +- apps/st_dissonance.py +- apps/st_novelty.py +- apps/st_unharmonicity.py +- apps/st_impliedchords.py + +To run: +``` +$ streamlit run st_dissonance.py -- -krnpath -jsonpath +``` +The -- is needed to pass the following arguments to the python script. diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/polifonia-lexicon/README.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/polifonia-lexicon/README.md new file mode 100644 index 00000000..02b0e0f1 --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/polifonia-lexicon/README.md @@ -0,0 +1,27 @@ +--- +component-id: Polifonia-Lexicon +name: Polifonia Lexicon - The Polifonia Multilingual WordNet +description: Annotation files for the Polifonia musical lexicon in English (EN), French (FR), Italian (IT), Spanish (ES), German (DE) and Dutch (NL). +type: Lexicon +release-date: 22/06/2022 +release-number: v1.0 +work-package: +- WP4 +licence: CC BY +links: +- https://github.com/polifonia-project/Polifonia-Lexicon +credits: +- https://github.com/roccotrip +- https://github.com/EleonoraMarzi +--- + +# Polifonia Lexicon + +### The Polifonia Multilingual WordNet + +This repository contains the annotation files for the Polifonia musical lexicon in English (EN), French (FR), Italian (IT), Spanish (ES), German (DE) and Dutch (NL). The lexicon was created selecting from **[BabelNet domains](http://lcl.uniroma1.it/babeldomains/)** all the **[WordNet](https://wordnet.princeton.edu)** musical synsets. + +V1. BabelNet domains synsets were manually validated, discarding nonpertinent concepts and lexicalizations. + +V2. Missing lexicalizations were added by professional translators. 
+ diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/registry_app/LICENSE.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/registry_app/LICENSE.md new file mode 100644 index 00000000..afad3ea7 --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/registry_app/LICENSE.md @@ -0,0 +1,13 @@ +ISC License (ISC) +================================== +_Copyright 2021 marilenadaquino_ + +Permission to use, copy, modify, and/or distribute this software for any purpose with or +without fee is hereby granted, provided that the above copyright notice and this permission +notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS +SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL +THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/registry_app/README.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/registry_app/README.md new file mode 100644 index 00000000..8fdd4dd9 --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/registry_app/README.md @@ -0,0 +1,10 @@ +# musoW Catalogue and CLEF (Crowdsourcing Linked Entities via web Form) + +musoW is a crowdsourced catalogue of online music resources. The catalogue is published via CLEF (*Crowdsourcing Linked Entities via web Form*), a lightweight Linked Open Data native cataloguing system tailored to small-medium crowdsourcing projects. 
+ +[![DOI](https://zenodo.org/badge/368253077.svg)](https://zenodo.org/badge/latestdoi/368253077) +[![License: ISC](https://img.shields.io/badge/License-ISC-blue.svg)](https://opensource.org/licenses/ISC) + +**musoW** [catalogue online](https://w3id.org/musow/) + +**CLEF** [source code](https://github.com/polifonia-project/clef) and [documentation](https://polifonia-project.github.io/clef/) diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/registry_app/ecosystem/container.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/registry_app/ecosystem/container.md new file mode 100644 index 00000000..8686e56e --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/registry_app/ecosystem/container.md @@ -0,0 +1,17 @@ +--- +container-id: musow-registry +name: The musoW registry +description: An online collaborative, crowdsourced, catalogue of music data on the web +type: Project +work-package: +- WP1 +project: polifonia-project +funder: + - name: European Commission + url: https://cordis.europa.eu/project/id/101004746/it + grant-agreement: "101004746" +has-part: + - musow-dataset + - musow-interface + - clef-software +--- diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/registry_app/ecosystem/dataset.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/registry_app/ecosystem/dataset.md new file mode 100644 index 00000000..9dba18bf --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/registry_app/ecosystem/dataset.md @@ -0,0 +1,42 @@ +--- +component-id: musow-dataset +type: Dataset +name: musoW dataset +description: musoW is a Linked Open Dataset of music resources available on the web. Data are described according to Schema.org and are served online in a dedicated platform for authoring, publishing and searching. 
+work-package: +- WP1 +project: polifonia-project +resource: https://github.com/polifonia-project/registry_app/releases +demo: https://projects.dharc.unibo.it/musow/ +release-date: 2023/05/16 +release-number: v1.0 +release-link: https://github.com/polifonia-project/registry_app/releases/latest +doi: 10.5281/zenodo.5603223 +changelog: https://github.com/polifonia-project/registry_app/releases/latest +licence: +- CC0 +contributors: +- Marilena Daquino +- Enrico Daga +- Albert Merono Penuela +- Paul Mulholland +- Simon Holland +- Mathieu d'Aquin +related-components: +- generated-by: + - clef-software +- persona: + - Laurent +- story: Laurent#1_MusicArchives +- reuses: + - "Schema.org https://schema.org" +bibliography: +- main-publication: "Daquino, M., Wigham, M., Daga, E., Giagnolini, L., & Tomasi, F. (2023). Clef. a linked open data native system for crowdsourcing. JOCCH. DOI: https://dl.acm.org/doi/10.1145/3594721 " +- publication: "Daquino Marilena et al. 2017. Characterizing the Landscape of Musical Data on the Web: state of the art and challenges. In Second Workshop on Humanities in the Semantic Web - WHiSe II, 21-25 Oct 2017, Vienna, Austria." +--- + +# musoW + +musoW, music data on the web is a Linked Open Data registry of music resources available on the web. It includes extensive descriptions of more that 500 music collections, datasets, digital libraries and software solutions relevant to music. 
+ +See the [online catalogue](https://w3id.org/musow/) diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/registry_app/ecosystem/interface.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/registry_app/ecosystem/interface.md new file mode 100644 index 00000000..3176d052 --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/registry_app/ecosystem/interface.md @@ -0,0 +1,37 @@ +--- +component-id: musow-interface +type: UserInterface +name: musoW online catalogue +description: musoW is the online registry of music resources available on the web. It serves the musoW dataset via a number of interfaces for browsing and searching. The interface also allows editing (modifying existing records or creating new ones), searching and querying (via a dedicated SPARQL endpoint). The interface is built on top of CLEF, a LOD-native web application for crowdsourcing. +work-package: +- WP1 +project: polifonia-project +resource: https://github.com/polifonia-project/registry_app/releases +demo: https://projects.dharc.unibo.it/musow/ +release-date: 2023/05/16 +release-number: v1.0 +release-link: https://github.com/polifonia-project/registry_app/releases/latest +doi: 10.5281/zenodo.5603223 +changelog: https://github.com/polifonia-project/registry_app/releases/latest +licence: +- CC0 +contributors: +- Marilena Daquino +related-components: +- generated-by: + - clef-software +- persona: + - Laurent +- story: Laurent#1_MusicArchives +- serves: + - musow-dataset +bibliography: +- main-publication: "Daquino, M., Wigham, M., Daga, E., Giagnolini, L., & Tomasi, F. (2023). Clef. a linked open data native system for crowdsourcing. JOCCH. DOI: https://dl.acm.org/doi/10.1145/3594721" +- publication: "Daquino Marilena et al. 2017. Characterizing the Landscape of Musical Data on the Web: state of the art and challenges. In Second Workshop on Humanities in the Semantic Web - WHiSe II, 21-25 Oct 2017, Vienna, Austria." 
+--- + +# musoW + +musoW, music data on the web is a Linked Open Data registry of music resources available on the web. It includes extensive descriptions of more that 500 music collections, datasets, digital libraries and software solutions relevant to music. + +See the [online catalogue](https://w3id.org/musow/) diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/rulebook/CHAMPIONS.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/rulebook/CHAMPIONS.md new file mode 100644 index 00000000..a30492a6 --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/rulebook/CHAMPIONS.md @@ -0,0 +1,65 @@ +--- +component-id: rulebook-champions +name: Polifonia Ecosystem Champions +description: List of reference administrators of repositories relevant to the Polifonia Ecosystem +type: Documentation +work-package: +- WP1 +licence: +- CC0 +related-components: +- extends: + - rulebook +--- +# Champions + +| Repository | Champion | +| --------------------------------------------------------------------------------- | -------------------------------------------------------------- | +| https://github.com/polifonia-project/rulebook | https://github.com/enridaga | +| https://github.com/polifonia-project/ecosystem | https://github.com/raphaelfournier | +| https://github.com/polifonia-project/sonar2021_data_transformation | https://github.com/phivk | +| https://github.com/polifonia-project/patterns | @deprecated | +| https://github.com/polifonia-project/registry_app | https://github.com/marilenadaquino | +| https://github.com/polifonia-project/clef | https://github.com/marilenadaquino | +| https://github.com/polifonia-project/dashboard | https://github.com/marilenadaquino | +| https://github.com/polifonia-project/web_portal | https://github.com/marilenadaquino | +| https://github.com/polifonia-project/comparative-measure | @deprecated | +| https://github.com/polifonia-project/musical-performance | @deprecated | +| 
https://github.com/polifonia-project/folk_ngram_analysis | https://github.com/danDiamo | +| https://github.com/polifonia-project/harmonic-similarity | @deprecated | +| https://github.com/polifonia-project/meetups_pilot | https://github.com/albamoralest | +| https://github.com/polifonia-project/meetups-knowledge-graph | https://github.com/albamoralest | +| https://github.com/polifonia-project/meetups_corpus_collection | https://github.com/albamoralest | +| https://github.com/polifonia-project/organs-dataset | https://github.com/pvankranenburg | +| https://github.com/polifonia-project/pitchcontext | https://github.com/pvankranenburg | +| https://github.com/polifonia-project/tunes-dataset | https://github.com/pvankranenburg | +| https://github.com/polifonia-project/tunes-code | https://github.com/pvankranenburg | +| https://github.com/polifonia-project/OCR | https://github.com/andreamust | +| https://github.com/polifonia-project/facets-search-engine/ | https://github.com/raphaelfournier | +| https://github.com/polifonia-project/licences | https://github.com/enridaga | +| https://github.com/polifonia-project/docuemntary-evidence-benchmark | https://github.com/enridaga | +| https://github.com/polifonia-project/sparql-anything-cookbook | https://github.com/enridaga | +| https://github.com/polifonia-project/external-components | https://github.com/enridaga | +| https://github.com/polifonia-project/stories | https://github.com/enridaga | +| https://github.com/polifonia-project/XD-Testing | https://github.com/fiorelaciroku | +| https://github.com/polifonia-project/textual-corpus-population | https://github.com/arianna-graciotti | +| https://github.com/polifonia-project/polifonia-lexicon | https://github.com/arianna-graciotti | +| https://github.com/polifonia-project/broadcast-concerts-knowledge-graph | https://github.com/mwigham | +| https://github.com/polifonia-project/patterns-knowledge-graph | https://github.com/jmmcd | +| 
https://github.com/polifonia-project/tonalities_pilot | https://github.com/guillotel-nothmann | +| https://github.com/polifonia-project/music-analysis-ontology | https://github.com/guillotel-nothmann | +| https://github.com/polifonia-project/ontology-network | https://github.com/valecarriero https://github.com/andreamust https://github.com/jonnybluesman | +| https://github.com/polifonia-project/music-meta-ontology | https://github.com/jonnybluesman https://github.com/andreamust https://github.com/valecarriero | +| https://github.com/polifonia-project/core | https://github.com/valecarriero | +| https://github.com/polifonia-project/instrument | https://github.com/valecarriero https://github.com/jonnybluesman | +| https://github.com/polifonia-project/source | https://github.com/valecarriero https://github.com/jonnybluesman | +| https://github.com/polifonia-project/music-algorithm-ontology | https://github.com/jonnybluesman https://github.com/n28div https://github.com/andreamust | +| https://github.com/polifonia-project/cometa-ontology | https://github.com/jonnybluesman https://github.com/andreamust https://github.com/n28div | +| https://github.com/polifonia-project/music-representation-ontology | https://github.com/n28div https://github.com/andreamust | +| https://github.com/polifonia-project/music-annotation-ontology | https://github.com/andreamust | +| https://github.com/polifonia-project/music-projection-ontology | https://github.com/n28div https://github.com/andreamust | +| https://github.com/polifonia-project/tunes-ontology | https://github.com/jonnybluesman https://github.com/andreamust | +| https://github.com/polifonia-project/meetups-ontology | https://github.com/albamoralest | +| https://github.com/polifonia-project/scorelib-ontology | https://github.com/rigaux | +| https://github.com/polifonia-project/bells-ontology | https://github.com/valecarriero | +| https://github.com/polifonia-project/organs-ontology | https://github.com/fciroku | diff --git 
a/content/_spice-h2020/ecosystem/content/_polifonia-project/rulebook/README.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/rulebook/README.md new file mode 100644 index 00000000..5b65d7ce --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/rulebook/README.md @@ -0,0 +1,148 @@ +--- +component-id: rulebook +name: Polifonia Ecosystem Development Rulebook +description: Guidelines, recommendations, and norms on how to contribute to the Polifonia Ecosystem. +type: Documentation +release-date: 21/04/2022 +release-number: v2.1 +release-link: TODO +work-package: +- WP1 +licence: +- Cc010Universal +resource: https://github.com/polifonia-project/rulebook +contributors: +- https://github.com/enridaga +- https://github.com/raphaelfournier +related-components: +- reuses: + - rulebook-schema + - ontology-rulebook +--- +# Rulebook +Guidelines, recommendations, and norms on how to contribute to the Polifonia Ecosystem. + +## Guidelines +### When to create a repository? +Create a GH repository whenever there is an activity which leads to the production of a *component* of the *Polifonia Ecosystem*. + +### Do I really need to create a repository for anything I do? +No. But as soon as the work is discussed or presented in a meeting a repository should be already there, or follow straight after! +A repository with annotated component descriptions (see later) is mandatory for components mentioned in official deliverables. + +### What if a repository already exists somewhere else? +You don’t need to fork the repository in the Polifonia organisation. External components can be described (with annotations) in the repository [external-components](https://github.com/polifonia-project/external-components/). + +### Champion +Each repository must have a champion. Champions need to be annotated in the [CHAMPIONS.md](CHAMPIONS.md) file. + +### Discussion and decisions +Discussions can happen anywhere at anytime. 
However, decisions that impact the development of the component **MUST** be logged within an Issue (a Github issue, example) and motivated. + +If the decision is not being recorded in an Issue, **it never happened**. + +### Tracking changes (commits) +Commit messages are mandatory and must reference at least one Issue. A good commit message is `Added folder XYZ with data from QWE, see also #432` where `#432` is the issue number in the same repository. You can also reference any URL in commit messages, please see GitHub documentation for examples. The more you link, the better. + +Useful readings on best practices: + + - https://gist.github.com/luismts/495d982e8c5b1a0ced4a57cf3d93cf60#file-gitcommitbestpractices-md + - https://medium.com/@danielfeelfine/commit-verbs-101-why-i-like-to-use-this-and-why-you-should-also-like-it-d3ed2689ef70 + +This is a [bad commit message](https://github.com/polifonia-project/rulebook/commit/78fb11bbe0fee670fea70dc3f3cf4bf096ab3513) +This is a [good commit message](https://github.com/polifonia-project/rulebook/commit/60dc07702fd6aaf86b029da0c5f873f77f36313e) + +### Tracking Progress Issue +Progress on the development of each component MUST be reported in the Issues section periodically. +Each repository **SHOULD** have a single **Tracking Progress Issue** for general progress update. +A simple reporting template can be a bullet list in three sections: Progress, Problems, and Perspectives (3P). + +The 3P are: + + - Progress: what concrete work has been done since the last update. + - Problems: anything that is slowing or blocking progress, or it is expected to do so. + - Perspectives: what progress is expected going forward, including plans that have been made to face any of the problems (if any). + +Please note that the Tracking progress issue is only for updates. Detailed, task-based issues should be used for referencing changes (commits) and can be linked in the Tracking Progress Issue. 
+ +Examples: + +- [Tracking progress issue (Rulebook)](https://github.com/polifonia-project/rulebook/issues/7) +- [Tracking progress issue (External Components)](https://github.com/polifonia-project/external-components/issues/1) + +### Naming conventions +Some naming conventions have been discussed, feel free to contribute to the discussion [here](https://github.com/polifonia-project/rulebook/issues/2) + +For repositories + + - Avoid including “Polifonia” in the name (e.g. `ecosystem` rather than `polifonia-ecosystem`) + - Avoid acronyms (`ontology-network` instead of `ON`) + +### Branches +Use branches for managing different versions of the code / components. Avoid creating a branch for each sub-system (e.g. /datasets /ui etc...). Instead, create different repositories. + +### Releases +Use Semantic Versioning for release numbers, and follow the GitHub workflow for releasing. + +Register your repository on Zenodo, by activating the related GitHub Action. See [this guide](https://guides.github.com/activities/citable-code/). + +## Contributing to the Ecosystem +### What is an Ecosystem *component*? +Any resource produced by or used in the research activity. See the [reference documentation](https://github.com/reeco-framework/reeco-annotation-schema/blob/main/schema/README.md) for a list of component types. + +### What is an Ecosystem *container*? +Components can be grouped in containers, representing a research activity (e.g. a project). See the [reference documentation](https://github.com/reeco-framework/reeco-annotation-schema/blob/main/schema/README.md) for a list of container types. + +The Polifonia Ecosystem includes by default the following containers. 
+ +Containers of type Project: + +- polifonia-project + +Containers of type WorkPackage + +- WP1 +- WP2 +- WP3 +- WP4 +- WP5 +- WP6 +- WP7 +- WP8 + +Containers of type Pilot + +- TUNES +- BELLS +- INTERLINK +- MUSICBO +- TONALITIES +- MEETUPS +- CHILD +- ORGANS +- ACCESS +- FACETS + + +### Polifonia Ecosystem Website +A repository contains the development work for at least 1 component in the **Polifonia Ecosystem**. One markdown text file should expose annotations (metadata) relative to a single component included in the repository. For example, a component-name.md file using the annotation schema of the Polifonia Ecosystem (the file can have any name). A repository can include multiple annotated files, hence expose multiple components. +Those annotations will be used by the [Polifonia Ecosystem website](https://github.com/polifonia-project/ecosystem). +This website will provide a user interface for navigating through the Polifonia Ecosystem (with aggregation pages, tags, etc). +Please note that the Polifonia ecosystem website uses the content of Github repositories as is, hence the need for good quality annotations / documentation. + +### Developing Schema Components Annotations +The annotations should be written at the top of the markdown file, between 2 “---” lines. The markup format is YAML (mostly a “key: value” format, see also example at the top of this file). The reference documentation of the annotation schema can be found at [this link](https://github.com/reeco-framework/reeco-annotation-schema/blob/main/schema/README.md). Developers can use this service to test the YAML code: https://jsonformatter.org/yaml-validator . 
+ +### Process towards ecosystem releases + +- Champions curate releases with project-specific frequency and rationale +- - Releases must be linked to Zenodo and the related Polifonia Community +- TB calls for next Ecosystem Release +- Champions reply giving details about version number and expected deadline (if any) +- Champions ensure component metadata is accurate +- Ecosystem Website prepare release candidate +- TB tests and validates Ecosystem Website release candidate +- Ecosystem released + +## How to contribute to the Rulebook +Please open an issue with proposals or questions about the rulebook! diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/rulebook/deliverable_guidelines.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/rulebook/deliverable_guidelines.md new file mode 100644 index 00000000..b7693d55 --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/rulebook/deliverable_guidelines.md @@ -0,0 +1,158 @@ +# Polifonia Software Deliverable Guidelines + + + +# Table of contents + + * [Report](#report) + * [Summary](#summary) + * [Exploitability](#exploitability) + * [Future work](#future-work) + * [Github](#github) + * [Checklist](#checklist) + * [Github documentation](#github-documentation) + * [Testing](#testing) + * [Reviewing](#reviewing) + +# Report + +**For each software** described in the deliverable, include the following three sections: **Summary**, **Exploitability, Future work**. + +### Summary + +A table including the following fields, partly based on the [schema.md](http://schema.md) in the rulebook ([link](https://github.com/polifonia-project/rulebook/blob/main/schema.md)). 
+ +```yaml +--- +id: either a Zenodo DOI or any other persistent handler +type: Software | Data +title: attributed title of the software +credits: authors and contributors +description: a short abstract (goal, motivation, problem, future and current evaluation methods) max 200 words +source code: the URL of the source code +documentation: link to documentation of the software (e.g. README.md) +release: link to either release or pre-release +licence: if restrictive license, you are required to explain why +running instance: e.g. a demo, a web app based on the software, a jupyter notebook showing how to run the code and few examples of usage +bibliography: peer-reviewed works or reports in which the software is presented. +related deliverables: link to Polifonia deliverables (e.g. reports) in which methods/software are detailed or relevant (see comment below) +--- +``` + +*NOTE: In **related deliverables,** include as many references as possible to documents (deliverables or online documentation) that:* + +- *detail methods used by the software and/or to create the data* +- *reuse the software/dataset (e.g. deliverables by other partners that reuse your work - or are going to)* + +*When including the link to documents, please briefly explain what aspects these documents address, e.g. `DX.X (full description of methods, context of usage); DY.Y (software reuse in task Z.Z)`* + +### Exploitability + +- Make clear the interconnection between the software and the overall project objectives +- Introduce the context in which the software / dataset is used (the task and the partners) +- Explain who is going to benefit from it (partners in Polifonia, potential re-users, stakeholders already identified). + +*NOTE: if the software / dataset is already detailed in another deliverable, this report can be just a short reminder, in case different reviewers are assigned to deliverables. 
In any case, provide all the necessary context information needed to understand your work without expecting the reviewer to dive into your code base.* + +### Future work + +- detail how you intend to **continue developing** the software component or enriching the dataset +- explain **evaluation** methods, even if not implemented yet + +--- + +# Github + +Please, make sure that your repository complies with [the rulebook](https://github.com/polifonia-project/rulebook), follow instructions in the **Checklist** and add suggestions for the **Documentation** on the repository. + +## Checklist + +- include the following header to make it findable by the [Polifonia ecosystem](https://github.com/polifonia-project/ecosystem). + +```yaml +--- +id: +name: +brief-description: +type: +release-date: +release-number: +work-package: +pilot: +keywords: + - kw1 + - kw2 +changelog: +licence: +release link: +image: +logo: +demo: +links: + - link +running-instance: +credits: +related-components: + - dataset + - component-id-1 + - component-id-2 + - cli + - cli-component-id1 + - cli-component-id2 + - stories + +bibliography: + - oneref + - another ref + +--- +``` + +- by the due date of the deliverable you must deposit the Github repository on **Zenodo** (see guidelines to start [Zenodo-github]() sync) +- make a release of the code (so that the Zenodo sync can start) +- include the **DOI** of Zenodo in evidence in the `README.md` file of the repository and in the **report** +- include the **license** (e.g. a comment in the code or in the `LICENSE.md` file, see [guidelines](https://www.freecodecamp.org/news/how-open-source-licenses-work-and-how-to-add-them-to-your-projects-34310c3cf94/)) + +## Github documentation + +Please, refer to [Awesome-READMEs](https://github.com/matiassingers/awesome-readme) for a list of exemplar curated README files. 
+Here is a [good example of README file for software (librosa python library)](https://github.com/librosa/librosa) and a [minimal template of README file for datasets](https://gist.github.com/shashvatshah9/5d587605cd087182ccffb46b6cf9e449) + +The documentation should include the following sections. + +### Introduction + +- abstract and highlights of the software +- reference to Polifonia deliverable of type OTHER/SOFTWARE (name and URL if available) +- citation of the software repository +- license +- Zenodo DOI + +### Information on installation and setup + +- either package **requirements** (e.g. `requirements.txt`) +- or add instructions to recreate the software environment (e.g. conda environment, docker, installer) +- [if applicable] list data sources to be downloaded (with links) + +### Running and examples + +- Instructions on how to **run the code** +- add either a **Jupyter notebook** or **code samples** with examples on how to run the code, features, and output - so that the reviewer sees exactly what you want her/him to see and avoid them to dive into the code + +## Testing + + + + It is good practice to start planning for testing from the very first moment the software component is engineered — before any actual coding takes place. Designing **test cases** for each function-method that is implemented, together with a simple framework allowing for their automatic execution is desirable. Nevertheless, the way the testing strategy is implemented pretty much depends on the type of software that is developed. Therefore, this last guideline is more of an invitation to deliver a testing methodology, as part of the codebase, rather than enforcing a specific strategy. Regardless of the software-specific methodology, please make sure to provide all the relevant instructions to configure and run the testing scripts. 
+ +--- + +# Reviewing + +There is a useful checklist for *reviewing software deliverables* [here](https://liveunibo.sharepoint.com/:w:/r/sites/polifonia/Shared%20Documents/Deliverables/software-deliverable-checklist.docx?d=w15bea5d87fc34a1a812c8cad237aecbd&csf=1&web=1&e=03keKc). diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/rulebook/ontology-KG-development-documentation-guidelines.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/rulebook/ontology-KG-development-documentation-guidelines.md new file mode 100644 index 00000000..c3f83779 --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/rulebook/ontology-KG-development-documentation-guidelines.md @@ -0,0 +1,85 @@ +--- +component-id: ontology-rulebook +name: Ontology and knowledge graph development and documentation guidelines +description: The guidelines for the development and documentation of an ontology and a knowledge graph +type: Documentation +release-date: TBD +release-number: v1.0 +release-link: +work-package: +- WP2 +pilot: +- BELLS +- ORGANS +- MEETUPS +- MUSICBO +- CHILD +- TONALITIES +keywords: + - Ontology/KG development + - Ontology/KG documentation +related-components: +- extends: + - rulebook +--- + +# Ontology and knowledge graph development and documentation guidelines + + +## Ontology development and documentation guidelines + +### Ontology development guidelines + - The namespace of the ontology (published within the Polifonia Ontology network) must follow this rule: + - Rule: https://w3id.org/polifonia/ontology/[name-of-the-ontology] + - Example: https://w3id.org/polifonia/ontology/musical-composition + - This rule is in line with the recommendations for URIs that can be found in the literature, specifically those by [ISA](https://ec.europa.eu/isa2/home_en/) [project](https://joinup.ec.europa.eu/sites/default/files/document/2013-02/D7.1.3\%20-\%20Study\%20on\%20persistent\%20URIs.pdf), since it uses a dedicated service (w3id.org),
and mentions the type of the resource in the URI, i.e. ontology, along with the specific ontology module + - A preferred prefix should be indicated for each namespace +- The ontology should be annotated with labels (rdfs:label) and comments (rdfs:comment). +- The ontology should contain alignments to possible ODPs reused: + - Such alignments make it explicit which patterns have been reused, supporting a pattern-based exploration of the ontology, and guaranteeing interoperability between ontologies at the level of patterns. + - Such alignments should be expressed through [OPLaX ontology](https://w3id.org/OPLaX), which reuses and extends state-of-the-art patterns annotation languages. + - See an example of an ontology annotated with the reused patterns here: https://github.com/ICCD-MiBACT/ArCo/blob/master/ArCo-release/ontologie/catalogue/1.2/catalogue.owl +- The ontology should contain alignments to possible ontologies produced by partners of Polifonia and reused within the project. + +### Ontology documentation guidelines + +- Each ontology (published within the Polifonia Ontology network) should be stored as an RDF/OWL file in a GitHub repository. + - The name of the repository should follow this rule: + - Rule: [name-of-ontology]-ontology + - Example: https://github.com/polifonia-project/musical-performance-ontology +- The documentation of the ontology in the README.md file: + - The README.md file must contain a brief description of the scope of the ontology. + - The README.md file should contain useful statistics about the ontology in order to give an overview (number of classes, number of properties, ...) + - The README.md file must contain examples of relevant Competency Questions with respective SPARQL queries + - The README.md file must contain a graphical representation of classes and predicates. + - The README.md file should contain the licence for the reuse (hence responsible people). 
+ +- The repository must include a separate folder containing ontology tests following eXtreme Design methodology (it is recommended to use the XDTesting tool). +- For each ontology produced by partners of Polifonia and reused within the project, but published outside the Polifonia Ontology Network, a repository linking to the ontology should exist. +- Information about the ontology must be added/updated in the [Network Ontology GitHub repository](https://github.com/polifonia-project/ontology-network). + +## Knowledge graph development and documentation guidelines + +### Knowledge graph development guidelines + +- The namespace of the knowledge graph (published within Polifonia) needs to follow the rule: + - Rule: https://w3id.org/polifonia/resource/[class-local-name]/[SHA-1 hash function of the unique attribute(s) of the individual] + - Example: https://w3id.org/polifonia/resource/Score/ec68f1e4727ecdd5272d247f3e3176743e38b469 for an entity of type Score, with the hash generated from the concatenation of the title and the composer of the composition of the score + - This rule is in line with the recommendations for URIs reported by the ISA project, that is http://\{domain\}/\{type\}/\{concept\}/\{reference\}, where the domain is a combination of the host and the relevant sector (w3id.org/polifonia/), the type is the type of resource that is being identified (resource/), the concept is the type of real world object identified ([class-local-name]), and the reference is a specific item [SHA-1 hash function of the unique attribute(s) of the individual]. See [here](https://joinup.ec.europa.eu/sites/default/files/document/2013-02/D7.1.3\%20-\%20Study\%20on\%20persistent\%20URIs.pdf) for more information. +- The knowledge graph should contain links (owl:sameAs) to the Wikidata knowledge graph. +- The knowledge graph needs to be deployed on the web through a SPARQL endpoint containing all relevant prefixes.
+ +### Knowledge graph documentation guidelines + +- The knowledge graph must be documented in a GitHub repository, that follows the rules already defined in the [Polifonia rulebook](https://github.com/polifonia-project/rulebook/) valid for all GitHub repositories. +- The documentation of the ontology in the README.md file: + - The README.md file must contain a brief description of the scope of the knowledge graph. + - The README.md file must contain the link to the SPARQL endpoint. + - The README.md file should contain useful statistics about the KG (number of triples, most populated classes, ...) + - The README.md file should mention the data sources of the KG + - The README.md file should contain examples of relevant Competency Questions with respective SPARQL queries + - The README.md file should contain the licence for data reuse (hence responsible people). +- A copy of data (Linked Open Data) should be linked in a dedicated folder in the repository. If the data volume exceeds 500MB, split data into different files. If data are difficult to split, use [ntriples](https://heardlibrary.github.io/digital-scholarship/lod/serialization) / [quads](https://www.w3.org/TR/n-quads/) serialisation. Otherwise, any other standard serialisation available. +- A story created with [Melody](https://projects.dharc.unibo.it/melody/), containing relevant queries that can be run on the knowledge graph, should be present in the documentation. At least one query should showcase the entity linking with Wikidata. 
+ + diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/rulebook/schema.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/rulebook/schema.md new file mode 100644 index 00000000..6d1ed684 --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/rulebook/schema.md @@ -0,0 +1,136 @@ +--- +component-id: rulebook-schema +name: Ecosystem Component Annotation Schema +description: the Ecosystem Component Annotation Schema +type: Schema +work-package: +- WP1 +licence: +- Cc010Universal +related-components: +- reuses: + - "Ecosystem Annotation Schema https://github.com/reeco-framework/reeco-annotation-schema/blob/main/schema/README.md" +- extends: + - rulebook +--- +# Ecosystem Component Annotation Schema + +A complete list of component and container types and related annotation terms can be found in the [reference documentation](https://github.com/reeco-framework/reeco-annotation-schema/blob/main/schema/README.md). + +## Use the schema + +### Components +Just copy the YAML code below and use it as the header of your component description MD file by changing terms values and removing terms that are not needed. 
+``` +--- +component-id: fabulous-component-source-code +type: Software +name: The Fabulous Source Code +description: Source code of The Fabulous +image: https://www.example.org/image.png +logo: https://www.example.org/logo.png +work-package: +- WP1 +- WP2 +- WP3 +- WP4 +- WP5 +- WP6 +- WP7 +- WP8 +pilot: +- TUNES +- BELLS +- INTERLINK +- MUSICBO +- TONALITIES +- MEETUPS +- CHILD +- ORGANS +- ACCESS +- FACETS +project: polifonia-project +resource: https://github.com/fabulous-inc/repo1/releases +demo: https://www.example.org/fabulous/demo +release-date: YYYY/MM/DD +release-number: v1.0-alpha +release-link: https://github.com/polifonia-project/repo1/releases/tag/v1.0 +doi: 10.5281/zenodo.000000 +changelog: https://github.com/polifonia-project/repo1/releases/tag/v1.0 +licence: +- CC-BY_v4 +- CC-BY-SA_v4 +- CC-BY-NC-ND_v4 +- Apache-2.0 +copyright: "Copyright (c) 2023 The Polifonia Project Contributors" +contributors: +- A developer +- Another contributor +related-components: +- informed-by: + - fabulous-requirements +- use-case: + - fabulous-uc +- story: + - fabulous-story +- persona: + - fabulous-persona +- documentation: + - fabulous-component-docs + - fabulous-component-tutorials +- evaluated-in: + - fabulous-evaluation +- extends: + - "A Java project Jena https://www.example.org" +- reuses: + - "Apache Camel https://camel.apache.org/" + - another-dataset +- serves: + - another-dataset +- generated-by: + - The AI code generator http://www.my-software-factory.com +- derived-from: + - this-other-component +credits: "This project has received funding from the European Union’s Horizon 2020 research and innovation programme under grant agreement N. 101004746" +--- +``` + + +### Containers +Just customise the YAML code below and use it as the header of your component description MD file by changing terms values and removing terms that are not needed. 
+ +``` +container-id: fabulous +name: The Fabulous Project +description: The Fabulous Project is a very important part of the Polifonia ecosystem. +type: Project +work-package: +- WP1 +- WP2 +- WP3 +- WP4 +- WP5 +- WP6 +- WP7 +- WP8 +pilot: +- ThePilot +project: polifonia-project +bibliography: +- main-publication: "Brown, L. (2019). The Role of Parenting Styles in Child Development. Child Development Perspectives, 13(3), 145-153." +- publication: + - "Smith, J. (2020). The Impact of Social Media on Mental Health. Journal of Psychology and Behavioral Sciences, 15(2), 45-62." +funder: + - name: Horizon 2020 Framework Programme + url: https://cordis.europa.eu/programme/id/H2020-EC + grant-agreement: "https://cordis.europa.eu/project/id/101004746" +credits: "This project has received funding from the European Union’s Horizon 2020 research and innovation programme under grant agreement N. 101004746." +has-part: + - fabulous-component-source-code + - fabulous-docs + - fabulous-tutorials + - fabulous-evaluation + - fabulous-requirements + - fabulous-dataset + +``` diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/source-ontology/README.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/source-ontology/README.md new file mode 100644 index 00000000..7cdaa8c2 --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/source-ontology/README.md @@ -0,0 +1,43 @@ +# Source Ontology +The Source ontology addresses concepts and relationships for representing sources of (music-related) information. + +[![DOI](https://zenodo.org/badge/372536364.svg)](https://zenodo.org/badge/latestdoi/372536364) +[![License: CC BY 4.0](https://img.shields.io/badge/License-CC_BY_4.0-lightgrey.svg)](https://creativecommons.org/licenses/by/4.0/) + +> 🔗 Ontology URI: [https://w3id.org/polifonia/ontology/source/](https://w3id.org/polifonia/ontology/source/) + +Source represents various sources of music-related information. 
These include manuscripts, textbooks, articles, interviews, reviews, comments, memoirs, etc. of different scope and format (physical, digital). The module aims to provide general support to describe information related to the creator and type of the source, the time and place when/where it was created, the context of production and usage, and the subject and goals. Although this conceptualisa- tion leans towards bibliographical sources, the module provides expressivity to indicate multimedia documents (e.g. images of scores, audio recording, video). For example, a video recording of a performance can be considered as a musical source – providing documentary evidence of a composition e.g. during an event. The module is part of the [Polifonia ontology network](https://github.com/polifonia-project/ontology-network). + +![Source module diagram](diagrams/source-module.png) + +## Competency questions addressed + +| **ID** | **Competency Question** | +|--------|--------------------------------------| +| CQ1 | Which is the subject of a source? | +| CQ2 | Which is the credibility of a source? | +| CQ3 | Which is the goal of a source? | +| CQ4 | Which is the type of a source? | +| CQ5 | Which is the context of production of a source? | +| CQ6 | Which is the context of usage of a source? | + +## Examples of SPARQL queries + +- Which is the subject of a source? + +```sparql +PREFIX src: +SELECT DISTINCT ?source ?subject +WHERE { ?source src:hasSubject ?subject . +} +``` + +## Statistics +- number of classes: 9 +- number of object properties: 14 +- number of datatype properties: 1 + + +## License + +This work is licensed under a [Creative Commons Attribution 4.0 International License](http://creativecommons.org/licenses/by/4.0/). 
diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/source-ontology/header.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/source-ontology/header.md new file mode 100644 index 00000000..b7cd4fa1 --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/source-ontology/header.md @@ -0,0 +1,58 @@ +--- +component-id: https://w3id.org/polifonia/ontology/source/ +type: Ontology +name: Source ontology +description: Source represents various sources of music-related information +image: diagrams/source-module.png +work-package: +- WP2 +pilot: +- MEETUPS +- MUSICBO +project: polifonia-project +resource: ontology/source.owl +release-date: 13/04/2023 +release-number: v1.0 +release-link: https://github.com/polifonia-project/source-ontology +doi: 10.5281/zenodo.7919970 +changelog: https://github.com/polifonia-project/source-ontology +licence: +- CC-BY_v4 +copyright: "Copyright (c) 2023 Source Ontology Contributors" +contributors: # replace these with the GitHub URL of each contributor +- Valentina Anita Carriero +- Fiorela Ciroku +- Jacopo de Berardinis +related-components: +- informed-by: + - polifoniacq-dataset +- reuses: # any reused/imported ontology + - https://w3id.org/polifonia/ontology/core/ +- story: # any related story this ontology addresses + - Carolina#1_SourcesCrossAnalysis + - Keoma#1_RestorationAndSoundPractices + - Ortenz#1_MusicAndChildhood + - Sonia#1_ExplorationMode +- persona: # any persona this ontology addresses + - Keoma + - Carolina + - Sonia + - Ortenz +--- + + +# Source Ontology + +Source represents various sources of music-related information. These include= +manuscripts, textbooks, articles, interviews, reviews, comments, memoirs, etc. +of different scope and format (physical, digital). 
The module aims to provide +general support to describe information related to the creator and type of the +source, the time and place when/where it was created, the context of production +and usage, and the subject and goals. Although this conceptualisa- tion leans +towards bibliographical sources, the module provides expressivity to indicate +multimedia documents (e.g. images of scores, audio recording, video). For +example, a video recording of a performance can be considered as a musical +source – providing documentary evidence of a composition e.g. during an event. + +[Link to the website](https://github.com/polifonia-project/source-ontology) diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/.github/ISSUE_TEMPLATE/story.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/.github/ISSUE_TEMPLATE/story.md new file mode 100644 index 00000000..5351613a --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/.github/ISSUE_TEMPLATE/story.md @@ -0,0 +1,35 @@ +--- +name: Story +about: Suggest a story for Polifonia +title: '' +labels: '' +assignees: '' + +--- + +## ID of the story +Indicate here the ID of the story. + +## WP +Indicate here the WPs involved in the story. + +## Pilots +Indicate here the pilots involved in the story. + +## Priority +Indicate here the priority of the story (must have/life improver/life changer/breakthrough) + +## Persona +Indicate here the [name](url) of the Persona involved in this story. + +## Goal +- List maximum 5 keywords that represent the goal of the Persona in this story. + +## Scenario +Write here a story regarding the goal(s) of the Persona and describing how the persona's task/need/problem is solved before, during and after interaction with the resource/software/service being developed. + +## Competency questions +- List here the question(s) the persona needs the resource/software/service to answer for satisfying their task/need/problem. 
+ +## Resources (optional) +- List here the resources (with references/links) where it is expected or known that the persona can find what she's looking for. diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Amy_Organologist/Amy#1_OrganTrends.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Amy_Organologist/Amy#1_OrganTrends.md new file mode 100644 index 00000000..0a2d847e --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Amy_Organologist/Amy#1_OrganTrends.md @@ -0,0 +1,44 @@ +--- +component-id: Amy#1_OrganTrends +name: "Organ Trends" +type: Story +description: For her next research project, Amy wants to discover artistic and technical trends of organs and how these developed. +related-components: + - persona: + - Amy +keywords: + - dutch organs + - instruments + - tradition +work-package: + - WP2 +pilot: + - ORGANS +story type: + - expert driven +--- +# Organ Trends + +## Goal + +For her next research project, Amy wants to discover artistic and technical trends of organs and how these developed. The development of these trends could possibly indicate wider social trends. + +## Scenario + +During the project, Amy researches both artistic and technical trends surrounding organs in the Netherlands. Amy is interested in finding connections between different regions and time periods to see how the trends have developed. She also wants to put her findings in a historical and social context. Amy hopes that the knowledge graphs can help her identify initial trends. That way she knows where to dig deeper to understand the trends. In order to discover trends, comparisons similar to those needed by the organ advisor (Paul) are required. But besides the comparison of stops in organs made by the same organ builder, the comparisons can be related to almost every component of the organ. 
For example, the different arthistorical details on the fronts of the organs, or the wind supply system are interesting to compare. + +## Competency questions + +See the set of CQs for Paul#1_OrganComparison. In addition: + +CQ1: What are geographically distinct features of organs from region X? (e.g., in what differ 17th century Southern German organs from 17th century Spanish organs?). + +## Resources + +- Amy already has access to the physical as well as online library of the university where she works. +- Amy also has the entire 15 volume collection from the Dutch organ encyclopaedia. She uses the encyclopaedia mostly for reading about the arthistorical and technical aspects and developments of organs and comparing these to other organs. +- Database websites for information about organs and churches which all offer slightly different kinds of information: + - https://www.kerk-en-orgel.nl + - https://reliwiki.nl/ + - http://www.orgbase.nl + - https://geoplaza.vu.nl/cms/research/kerkenkaart/ – Kerkenkaart (churchmap) from the Vrije Universiteit Amsterdam diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Amy_Organologist/Amy#2_OrganBuilders.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Amy_Organologist/Amy#2_OrganBuilders.md new file mode 100644 index 00000000..efeb93f3 --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Amy_Organologist/Amy#2_OrganBuilders.md @@ -0,0 +1,42 @@ +--- +component-id: Amy#2_OrganBuilders +name: "Organ Builders" +type: Story +description: Amy wants to assess the developments of organ builders. +related-components: + - persona: + - Amy +keywords: + - dutch organs + - instruments + - tradition +work-package: + - WP2 +pilot: + - ORGANS +story type: + - expert driven +--- +# Organ Builders + +## Goal + +Amy wants to assess the developments of organ builders. 
+ +## Scenario + +This research includes looking into which organs an organ builder worked on and how their projects developed over time. This development can range from an increase of the size of the organ, to locality of their projects, or to the type or prestige of the project. The history section of the 15-part organ encyclopaedia is mostly used for this as this specifies year and changes that are made to the organs, but this obviously takes a lot of time and effort to research. + +## Competency questions + +Relevant CQs are all in Paul#1_OrganComparison. + +## Resources + +- Amy already has access to the physical as well as online library of the university where she works. +- Amy also has the entire 15 volume collection from the Dutch organ encyclopaedia. She uses the encyclopaedia mostly for reading about the arthistorical and technical aspects and developments of organs and comparing these to other organs. +- Database websites for information about organs and churches which all offer slightly different kinds of information: + - https://www.kerk-en-orgel.nl + - https://reliwiki.nl/ + - http://www.orgbase.nl + - https://geoplaza.vu.nl/cms/research/kerkenkaart/ – Kerkenkaart (churchmap) from the Vrije Universiteit Amsterdam diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Amy_Organologist/readme.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Amy_Organologist/readme.md new file mode 100644 index 00000000..e9de7058 --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Amy_Organologist/readme.md @@ -0,0 +1,36 @@ +--- +component-id: Amy +name: "Amy" +description: "Organologist, Musicologist, and Music historian." +type: Persona +work-package: +- WP2 +long-title: "Amy" +related-components: +- story: + - Amy#1_OrganTrends + - Amy#2_OrganBuilders +project: polifonia-project +pilot: +- ORGANS +--- + +# Amy + +This is a description of the persona Amy. 
+ +## Name +Amy + +## Age +51 + +## Occupation +Organologist / Musicologist / Music historian, researcher and professor at the music department of a university + +## Knowledge/Skills +Amy has a bachelor's degree in Musicology, a master's in History, and her Ph.D. combined these two fields. For obvious reasons, Amy is already a skilled researcher. + +## Interests +Amy's interests are primarily in church music, but she is also fascinated with relations between the music and society. + diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Andrea_Theology_Scholar/Andrea#1_Serendipity.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Andrea_Theology_Scholar/Andrea#1_Serendipity.md new file mode 100644 index 00000000..3a33f3bb --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Andrea_Theology_Scholar/Andrea#1_Serendipity.md @@ -0,0 +1,58 @@ +--- +component-id: Andrea#1_Serendipity +name: Serendipity +type: Story +description: "Andrea's goal is to discover and explore sacred music written from the period of the Renaissance through the twentieth century and its unexpected links with his scholarly studies." +related-components: + - persona: + - Andrea + - musoW +keywords: + - serendipity + - organs + - sacred music +work-package: + - WP1 + - WP5 +pilot: + - ORGANS +story type: + - expert driven +--- + +# Serendipity + +## Persona +Andrea is a priest-scholar and a graduate music pianist with a specialization in organ music. + +## Goal +Andrea's goal is to discover and explore sacred music written from the period of the Renaissance through the twentieth century and its unexpected links with his scholarly studies. + +## Scenario +Andrea has been very busy studying ancient texts and history books and he would like to dedicate more time to his passion for music and organs. However, he doesn't actually know what to look for.
He is fond of sacred music, classical music and organs and it would be cool for him if he could read curiosities about these topics or his favorite composers. And who knows, he might also find interesting and unexpected links to his scholarly studies without resorting to extensive study and research from many books of several libraries. + +## Competency questions + +CQ1: Can I find interesting materials without applying filters? + +CQ2: What types of resources can I find? + +CQ3: Is there a way of visualizing all the materials connected to my interests? + +CQ4: Can I keep getting suggestions in real time? + +CQ5: How can I share what I find on the site? + +CQ6: How can I personalize my navigation experience without knowing the filtering criteria or the materials' characteristics? + + +## Resources +- Andrea uses: + - Electronic Periodicals and physical magazines from the Pontificia Universitas Lateranensis + - His personal collection + - Social media + +- Andrea could be helped by: + - a technology to easily explore music resources according to serendipity criteria; + - a technology to visualize the contents in a dynamic layout; + - a technology to personalize his navigation experience a priori. \ No newline at end of file diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Andrea_Theology_Scholar/readme.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Andrea_Theology_Scholar/readme.md new file mode 100644 index 00000000..d5b83676 --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Andrea_Theology_Scholar/readme.md @@ -0,0 +1,38 @@ +--- +component-id: Andrea +name: "Andrea" +description: "Pianist, Organist, Priest and Theology Scholar." +type: Persona +long-title: "Andrea" +related-components: +- story: + - Andrea#1_Serendipity +project: polifonia-project +pilot: +- ORGANS + +--- + +# Andrea + +This is a description of the persona Andrea.
+ +## Name +Andrea + +## Age +29 + +## Occupation +- Primary role + - Theology Scholar +- Secondary roles + - Priest + - Graduate organist + +## Knowledge/Skills +Andrea is a priest and a theology scholar. He is also a graduate music pianist, with a specialization in organ music. + +## Interests +His main interest is the study of ancient texts, theology and philosophy. +He is also very passionate about organs and Christian sacred music written from the period of the Renaissance through the twentieth century. diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Anna_Hearing-impaired/Anna#1_HearingMusic.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Anna_Hearing-impaired/Anna#1_HearingMusic.md new file mode 100644 index 00000000..56f03fef --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Anna_Hearing-impaired/Anna#1_HearingMusic.md @@ -0,0 +1,41 @@ +--- +component-id: Anna#1_HearingMusic +name: Hearing Music +type: Story +description: Anna wants to experience live music with the aid of haptic technology. +related-components: + - persona: + - Anna +keywords: + - haptics + - live music +work-package: + - WP2 + - WP3 + - WP4 + - WP5 +pilot: + - ACCESS +story type: + - expert driven +--- +# Hearing Music + +## Persona +Anna + +## Goal +Anna wants to experience live music with the aid of haptic technology. + +## Scenario +Anna, who became hearing impaired in later life, attends a concert by The Blockheads at the Stables music venue with a friend Ben who is deaf from birth, and her hearing friend Caroline. In order to enhance their experience of the concert, they decide to use the haptic bracelets offered by the venue. Anna is a big fan of the bass player Norman Watt Roy, whereas her friend is interested in the drums played by John Roberts. For tonight’s performance, the Stables are offering a live automatic haptic transcription of the bass part.
The Stables have also hired a musical interpreter for the hearing impaired who will be communicating the overall performance using an electronic drum kit from which a haptic feed is available. As a third option, a chest-worn transducer is available that takes the audio feed and turns the wearer’s chest cavity into a bass woofer. Anna used the bass transcription. Ben opted for the live interpretation. During ‘Hit me with your rhythm stick’ Anna grabs the hand of her hearing friend Caroline in excitement and puts it on her wrist to feel the bass rhythm. Anna and Ben sign to each other during the first two songs, and decide to swap feeds for the third. + +When they hand the equipment back in at the end of the gig, the Stables staff point out that there is a summer workshop being run for hearing impaired beginner drummers using a full drum kit and bracelets on all four limbs. + +## Competency questions + +CQ1: What is the rhythm of the bassline? + +CQ2: What is the rhythm of the electronic drum kit? + +## Resources diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Anna_Hearing-impaired/readme.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Anna_Hearing-impaired/readme.md new file mode 100644 index 00000000..b9161831 --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Anna_Hearing-impaired/readme.md @@ -0,0 +1,31 @@ +--- +component-id: Anna +name: "Anna" +description: "Anna is a fan of live music. She became hearing impaired in later life." +type: Persona +long-title: "Anna" +related-components: + - story: + - Anna#1_HearingMusic +project: polifonia-project +pilot: + - ACCESS +--- + +# Anna + +This is a description of the persona Anna. + +## Name +Anna + +## Age +62 + +## Occupation + +## Knowledge/Skills +She has a knowledge of popular music. She is learning sign language. + +## Interests +Anna is a fan of live music. She became hearing impaired in later life.
diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Brendan_Traditional_Musician/Brendan#1_FindTraditionalMusic.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Brendan_Traditional_Musician/Brendan#1_FindTraditionalMusic.md new file mode 100644 index 00000000..9fa44f81 --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Brendan_Traditional_Musician/Brendan#1_FindTraditionalMusic.md @@ -0,0 +1,52 @@ +--- +component-id: Brendan#1_FindTraditionalMusic +name: Find Traditional Music +type: Story +description: Find suitably similar traditional music and historical context from northern European corpora. +related-components: + - persona: + - Brendan +keywords: + - traditional music + - melodic similarity + - rhythmic similarity + - tunes +work-package: + - WP3 +pilot: + - TUNES +story type: +- expert driven +--- +# Find Traditional Music + +## Goal + +Find suitably similar traditional music and historical context from northern European corpora. + +## Scenario + +Brendan has received funding from the Arts Council of Ireland to create a new album of traditional music. He has an interest in combining European folk music with Irish music, and this is something that has informed both his academic and creative output. For this album, he would like to blend Irish tunes with tunes from Northern Europe. In order to do this, he first has to identify archives, performers, and recordings from these areas. He must then search for suitable pieces that would work with Irish tunes, in terms of time signatures, tempo, function, and tonality. This can be a time consuming task to search through multiple archives, and in languages that Brendan is not fluent in. If there was a portal to query and compare the similarity of known pieces of music against multiple different archives and regions, this would save time and potentially yield more fruitful results.
Brendan then has to compile information on the history and authenticity of each tune in their context, which informs his performance practice, research interests, and the information available on the sleeve notes of his album. This knowledge will also be presented aurally at concerts and recitals. + + +## Competency questions + +CQ1: What tunes have similar geographic origin as tune X? + +CQ2: What tunes are similar to tune X, given similarity measure Y? + +CQ3: Given a set of tunes, from which collections are these tunes? + +CQ4: Given a set of tunes, what tunes are from collection X? + +CQ5: What are the metadata for collection X? + +## Resources + +## Remarks + +UI Requirements: +- Can the notation of query and results be viewed simultaneously for comparison? + +Requirements: +- The tool should provide the provenance of any given result. diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Brendan_Traditional_Musician/readme.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Brendan_Traditional_Musician/readme.md new file mode 100644 index 00000000..bd9cd000 --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Brendan_Traditional_Musician/readme.md @@ -0,0 +1,32 @@ +--- +component-id: Brendan +name: "Brendan" +description: "Irish Traditional Musician/Scholar and Ethnomusicologist." +type: Persona +long-title: "Brendan" +related-components: + - story: + - Brendan#1_FindTraditionalMusic +project: polifonia-project +pilot: + - TUNES +--- + +# Brendan + +This is a description of the persona Brendan + +## Name +Brendan + +## Age +39 + +## Occupation +Traditional Musician / Lecturer in Ethnomusicology & Irish Traditional Music + +## Knowledge/Skills +Brendan is an active performer, and recently appointed as a lecturer. He comes from a family of traditional musicians, and has a thorough understanding of Irish traditional music. He has a number of solo and group recordings available. 
His PhD focused on traditional music, community, and identity in west Cork. + +## Interests +Beyond his own speciality of Irish traditional music, Brendan has spent considerable time immersed in Breton, Galician, and Bulgarian musical traditions. He has had success in blending these traditions with Irish music in previous recordings, and aims to broaden his horizons for upcoming performances and research. diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Carolina_Music_Historian/Carolina#1_SourcesCrossAnalysis.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Carolina_Music_Historian/Carolina#1_SourcesCrossAnalysis.md new file mode 100644 index 00000000..75e87b16 --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Carolina_Music_Historian/Carolina#1_SourcesCrossAnalysis.md @@ -0,0 +1,86 @@ +--- +component-id: Carolina#1_SourcesCrossAnalysis +name: Sources Cross-Analysis +description: "Carolina has to prepare a conference for the anniversary of the birth of the composer Giacomo Antonio Perti and she needs to collect some informations about his career." +related-components: +- persona: + - Carolina +keywords: + - sources cross-analysis +work-package: + - WP2 + - WP3 + - WP4 + - WP5 +pilot: + - MUSICBO +type: Story +story type: +- expert driven +--- +# Sources Cross-Analysis + +## Persona +Carolina is a music historian with a passion for the Baroque. She is a researcher and collaborates with the International Museum and the Music Library of Bologna. + +## Goal +Carolina has to prepare a conference for the anniversary of the birth of the composer Giacomo Antonio Perti and she needs to collect some informations about his career. She knows that Giacomo Antonio Perti is the author of "Masses" and she wants to dedicate a particular section to these compositions. 
+- sources cross-analysis + +## Scenario +In order to accomplish her goal first Carolina must find the scores that constitute her primary sources. Thus, Carolina should find the places where these compositions were played for the first time how many musicians were involved, how many musical instruments, the names of the musicians. Carolina is also interested in the singers involved and the choir. Carolina has to find written evidence that says how these Masses were received. Carolina is also interested in finding out in which tonality the compositions were written. + +## Competency questions + +CQ1: Where was the musical composition X performed? *(musical performance)* + +CQ2: In which buildings was musical composition X performed? *(musical performance)* + +CQ3: Where was the musical composition X performed for the first time? *(musical performance)* + +CQ4: In which buildings was musical composition X performed for the first time? *(musical performance)* + +CQ5: Which performers (musicians, singers) have performed musical composition X? *(musical performance)* + +CQ6: Which performers (musicians, singers) have performed musical composition X for the first time? *(musical performance)* + +CQ7: In which scores is there evidence of musical composition X? *(evidence)* + +CQ8: In which historical documents is there evidence of musical composition X? *(evidence)* + +CQ9: In which bibliographic references is there evidence of musical composition X? *(evidence)* + +CQ10: In which historical documents is there evidence of reception of musical composition X? *(evidence)* + +CQ11: In which bibliographic references is there evidence of reception of musical composition X? *(evidence)* + +CQ12: Which is the medium of performance of musical composition X? *(musical composition)* + +CQ13: Which was the first medium of performance of musical composition X? *(musical composition)* + +CQ14: Which instruments are involved in musical composition X? 
*(musical composition)* + +CQ15: Which voices are involved in musical composition X? *(musical composition)* + +CQ16: In which tonality was written the musical composition X for the first time? *(musical composition)* + +CQ17: Which is/are the composer(s) of the musical composition X? *(musical composition)* + +CQ18: Who has spoken about the musical composition X? *(reception of musical composition)* + +CQ19: Who has spoken about the musical performance X? *(reception of musical performance)* + +CQ20: How was the musical composition X received? *(reception of musical composition)* + +CQ21: How was the performance of musical composition X received? *(reception of musical performance)* + +CQ22: When was a musical composition performed? *(musical performance)* + + +## Resources + +Medium Of Performance : https://www.loc.gov/marc/marbi/2011/2011-dp05.html + +Form of musical work : https://www.iflastandards.info/unimarc/terms/fom + +Terminorum Musicae Index Septem Linguis Redactus : https://archive.org/details/TerminorumMusicaeIndexSeptemLinguisRedactus/mode/2up (7 languages dictionary) diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Carolina_Music_Historian/readme.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Carolina_Music_Historian/readme.md new file mode 100644 index 00000000..479b2661 --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Carolina_Music_Historian/readme.md @@ -0,0 +1,41 @@ +--- +component-id: Carolina +name: "Carolina" +description: "Researcher who collaborates with the International Museum and the Music Library of Bologna" +type: Persona +long-title: "Carolina" +related-components: +- story: + - Carolina#1_SourcesCrossAnalysis +project: polifonia-project +work-package: + - WP2 + - WP3 + - WP4 + - WP5 +pilot: +- MUSICBO +--- + +# Carolina + +This is a description of the persona Carolina. 
+ +## Name +Carolina + +## Age + + +## Occupation +- Primary role + - Music historian +- Secondary roles + - Researcher + +## Knowledge/Skills +She is a researcher and collaborates with the International Museum and the Music Library of Bologna. + +## Interests +Carolina has a passion for the Baroque. + diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/David_Music_Historian/David#1_MusicHistorian.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/David_Music_Historian/David#1_MusicHistorian.md new file mode 100644 index 00000000..fa5a0e68 --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/David_Music_Historian/David#1_MusicHistorian.md @@ -0,0 +1,72 @@ +--- +component-id: David#1_MusicHistorian +name: Music Historian +type: Story +related-components: + - persona: + - David +keywords: + - music historian + - social history of music +work-package: + - WP2 + - WP3 + - WP4 + - WP5 +pilot: + - MEETUPS +story type: +- expert driven +--- +# Music Historian + +## Persona +David + +## Goal +David is interested in understanding the social history of music, e.g. who were the musicians, who was the audience, how did a particular musical environment relate to the wider musical environment. He has a particular interest in understanding the environment around music which would not be classified as ‘elite’ music. He is also interested in people’s experience of listening to music. + +## Scenario +David will work for several years collecting information, e.g. about brass bands, and populating a database with this information, as a preparation for writing a book. + +## Competency questions + +CQ1: What music was being played? + +CQ2: Who was listening to the music? + +CQ3: How did the instruments used change over time? + +CQ4: What was source of money – e.g. for instruments? + +CQ5: Where were the musicians coming from? + +CQ6: Who were teaching the musicians? 
+ +CQ7: Where were the places (in which they played)? + +CQ8: How were instruments sold? + +CQ9: What was the sales pitch? + +CQ10: How does the world of brass bands connect with that of elite music? + +CQ11: What did people ‘make of’, how did they react to, e.g. music? + +CQ12: Who was the author of a piece of music? +e.g. can we identify it by its incipit? + +CQ13: What was the country of origin of a piece of music? + +CQ14: What is the cause of a change, e.g. increasing number of concerts over time? + + +## Resources +David's work could be helped by: + +sonic visualization, e.g. to recognise ‘thumbprints’ to identify the composer of a piece of music; + +search of document database using RDF annotations and query-time reasoning; + +NLP, e.g. to suggest relevant keywords for annotation; including using some semantics, e.g. proposing an annotation which does not occur in the text. + diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/David_Music_Historian/readme.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/David_Music_Historian/readme.md new file mode 100644 index 00000000..e6483539 --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/David_Music_Historian/readme.md @@ -0,0 +1,29 @@ +--- +component-id: David +name: "David" +description: Music Historian. +type: Persona +long-title: "David" +related-components: + - story: + - David#1_MusicHistorian +project: polifonia-project +pilot: + - MEETUPS +--- + +# David + +A music historian working in the music department of a university. + +## Name +David + +## Occupation +Researcher and professor + +## Knowledge/Skills +David’s original degree was in history. He also played the trombone professionally in an orchestra. His Ph.D. was in the history of the trombone. + +## Interests +David has in interest in the history of brass instruments. 
diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Frank_Organist/Frank#1_OrganKnowledge.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Frank_Organist/Frank#1_OrganKnowledge.md new file mode 100644 index 00000000..21bcfd80 --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Frank_Organist/Frank#1_OrganKnowledge.md @@ -0,0 +1,50 @@ +--- +component-id: Frank#1_OrganKnowledge +name: Organ Knowledge +persona: + - Frank +keywords: + - dutch organs + - instruments + - tradition +work-package: + - WP2 +pilot: + - ORGANS +type: Story +story type: + - expert driven +related-components: +- persona: + - Frank +--- +# Organ Knowledge + +## Goal + +Frank wants to learn more about other organs in the country. + +## Scenario + +Frank regularly comes together with his friends from church. They want to plan a little weekend trip to the other side of the country and go to the church mass on Sunday in the city nearby where they are staying. To prepare for the visit, he wants to learn about the church and the organ. In general, Frank wants to find out more about the history of the church, but also what the similarities are between how the organs (the one he usually plays and the one he will listen to) sounds. For this, Frank is primarily interested in the disposition and who built the organ. + +## Competency questions + +See story Paul#1_OrganComparison for retrieval CQs. In addition: + +CQ1: What does the organ look like? + +CQ2: Which organs are built in a similar style? + +CQ3: What is the concert agenda for organ X? + +CQ4: Where to find audio/video resources featuring organ X? + +CQ5: Where to find audio/video resources featuring organist X? + +CQ6: What is the address of the owner/maintainer of organ X? + +## Resources + +- Frank has some of the books from the Dutch organ encyclopaedia. He is also not too familiar with the contents of the books. 
Sometimes he scans through it looking for interesting facts, but does not know much about most organs that are mentioned in the books. +- When Frank wants to look up specific information, he always uses his standard search engine on his internet browser, unless he knows that the organ he wants to research is in one of the encyclopaedia editions he owns. On the internet, however, he cannot always find or access the information he wants. \ No newline at end of file diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Frank_Organist/readme.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Frank_Organist/readme.md new file mode 100644 index 00000000..85e16c5c --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Frank_Organist/readme.md @@ -0,0 +1,35 @@ +--- +component-id: Frank +name: "Frank" +description: History Teacher at the Secondary Level, Amateur Organist. +type: Persona +long-title: "Frank" +related-components: + - story: + - Frank#1_OrganKnowledge +project: polifonia-project +pilot: + - ORGANS +--- + +# Frank + +This is a description of the persona Frank. + +## Name +Frank + +## Age +45 + +## Occupation +- Primary + - Organist at the church in his hometown. +- Secondary + - History teacher at his local high school. + +## Knowledge/Skills +Frank has a history degree and teaches history class in high school. On the side, he is the organist at his local church. He is very familiar with the history and technicalities of his home organ and church. + +## Interests +For Frank, playing the organ is more of a hobby rather than a job. Growing up, Frank has always been interested in music. When he found his other passion—history—the two combined perfectly in the organ. He is interested in both the musical and historical aspects. 
diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Jorge_Librarian/Jorge#1_OrganizeMyLibrary.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Jorge_Librarian/Jorge#1_OrganizeMyLibrary.md new file mode 100644 index 00000000..52768213 --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Jorge_Librarian/Jorge#1_OrganizeMyLibrary.md @@ -0,0 +1,65 @@ +--- +component-id: Jorge#1_OrganizeMyLibrary +name: "Organize my digital library" +description: "Jorge manages a digital library of scores. He aims at describing each score with a rich set of contextual information, although a comprehensive description is often not possible." +persona: + - Jorge +keywords: + - Improve findability + - Provide summary of content +work-package: + - WP1 + - WP2 + - WP3 +pilot: + - FACETS +type: Story +story type: + - expert driven +related-components: +- persona: + - Jorge +- story: + - Jorge#2_FindSimilarScores + - Jorge#3_DynamicExploration + - Jorge#4_LinkToSources +--- +# Organize my digital library + +## Persona +Jorge + +## Goal + +Jorge manages a digital library of scores. He aims at describing each score with a rich set of contextual information, although a comprehensive description is often not possible. Among this information, one finds + - The musical work (or 'Opus') this score belongs to. A score can cover a whole Opus (e.g., a symphony) or only a part of it (e.g., the third movement). Additionally, it can contain only some parts, all the parts, a transcription for non-original instruments, etc. Jorge wants to preserve each score at the appropriate level, with an adequate referencing (e.g., the official Opus number in a standard catalogue exists, K234.a, or BWV192, etc.) 
+ - Licence and copyright information + - Relations to standard external resources to refer to, e.g., composers or other kinds of authorship + - An organisation in collections, clear and flexible + - Tools to manage this organisation and search for relevant scores. + + + + +## Scenario + +The dashboard presents an overview of the classifying dimensions. Some of them might be hierarchical in nature (for instance the region/country/city classification). Whenever a dimension is chosen, the dashboard adapts to this new context by refining the classification of scores based on sub-dimensions. + +## Competency questions + +CQ1: Is there a digital space to represent and describe the concept of "Opus", and store digital scores related to an opus ? + +CQ2: Is my collections and subcollections organisation based on clear concepts (e.g., genre, composer, period...) to gather Opuses ? + +CQ3: Am I able to navigate, search and visualize my collections and opus ? + +CQ4: Can I examine *features* extracted from digital scores (e.g., tonality, when relevant, number of parts, etc.) ? + +CQ5: Identify the dimensions and/or features that are relevant to support an on-the-fly reorganization of the digital library, whenever the standard organization does not meet the user's expectations + +CQ6: Can I progressively explore the content of my library, adding criteria to refine large results ? + +## Resources + +In terms of digital tools, Jorge wants a dashboard that gives him at a glance an overview of a score library content. The overview shows statistics on the library organized after the many dimensions that can be used to classify scores: composer, period, countries/region/city, style, length, tonality, prominent patterns, orchestration, etc. 
+ diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Jorge_Librarian/Jorge#2_FindSimilarScores.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Jorge_Librarian/Jorge#2_FindSimilarScores.md new file mode 100644 index 00000000..4670592f --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Jorge_Librarian/Jorge#2_FindSimilarScores.md @@ -0,0 +1,64 @@ +--- +component-id: Jorge#2_FindSimilarScores +name: "Find similar scores" +description: "People interested in finding scores ask for some very specific criteria. Often, the resulting list is empty, or contains very few scores." +persona: + - Jorge +keywords: + - improve findability + - help library visitors to get what they search for by relaxing their initial searches +work-package: + - WP1 + - WP2 + - WP3 +pilot: + - FACETS +type: Story +story type: + - expert driven +related-components: +- persona: + - Jorge +- story: + - Jorge#1_OrganizeMyLibrary + - Jorge#3_DynamicExploration + - Jorge#4_LinkToSources +--- +# Find similar scores + +## Persona +Jorge + +## Goal +People interested in finding scores ask for some very specific criteria, such as for instance + - "I would like the list of Toccatas for organ in D composed at Rome during the 17th century" + - "I would like an easy piece for violin and guitar, less than 3 mins long" + - "I am searching for all the trios written by W.A. Mozart" + +The resulting list is empty, or contains very few scores. The visitors are disappointed, and do not know how to find something that suits their need. + +## Scenario + +Jorge then uses a tool to propose similar work, close to the initial search. It might be (referring to the above list of examples) + - the list of toccatas in Italy, or Italian organ works in general, or toccatas written for the harpsichord, in a different key signature, or comparable pieces written in other countries, etc. 
+ - a piece written for oboe and guitar, or any other monodic instrument, suitable for transposing as a violin part + - trios written by J. Haydn or composers from the same style/period + + Jorge can propose these similar scores to his visitors. + + +## Competency questions + +CQ1: Can I use similarity criterias based on metadata and features of musical works ? + +CQ2: Am I proposed the most relevant criterias, or combination of criterias, to expand an initial search ? + + + +## Resources + + +Jorge's work could be helped by tools that expand a search, relaxing criterias in a controlled way. +Such a dashboard presents an overview of the classifying dimensions. Some of them might be hierarchical in nature (for instance the region/country/city classification). Whenever a dimension is chosen, the dashboard adapts to this new context by refining the classification of scores based on sub-dimensions. + + diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Jorge_Librarian/Jorge#3_DynamicExploration.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Jorge_Librarian/Jorge#3_DynamicExploration.md new file mode 100644 index 00000000..2d8aa28f --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Jorge_Librarian/Jorge#3_DynamicExploration.md @@ -0,0 +1,48 @@ +--- +component-id: Jorge#3_DynamicExploration +name: "Dynamic exploration" +description: "Jorge accesses to a part of the collection and is overwhelmed by the number of scores and their heterogeneity." 
+persona: + - Jorge +keywords: + - help Jorge and visitors explore the library content + - refine/expand the current result of a search +work-package: + - WP1 + - WP2 + - WP3 +pilot: + - FACETS +type: Story +story type: + - expert driven +related-components: +- persona: + - Jorge +- story: + - Jorge#1_OrganizeMyLibrary + - Jorge#2_FindSimilarScores + - Jorge#4_LinkToSources +--- +# Dynamic exploration + +## Persona +Jorge + +## Goal +Jorge accesses to a part of the collection and is overwhelmed by the number of scores and their heterogeneity. Thus he expects the system to propose a set of means to refine the search, based on "facets" that partition the current result in a meaningful way. + +## Scenario + +Instead of showing a list containing thousands of scores, the result is organized in groups, and each group is characterized by a distinctive property value. These group can be determined on the fly: if one accesses to the collection of a composer for instance, then relevant groups are the period of composition, the style, the genre, etc. If, instead, the initial access is on a genre, then grouping include composers. + +A nice feature would be to determine on the fly a partitioning of scores based on their salient patterns, if relevant. + +## Competency questions + +CQ1: What are the most relevant dimensions to classify on the fly a result set ? + +CQ2: What features can be extracted on the fly from a score content to partition in a meaning ful way a score collection? 
+ +## Resources + diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Jorge_Librarian/Jorge#4_LinkToSources.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Jorge_Librarian/Jorge#4_LinkToSources.md new file mode 100644 index 00000000..e5e0f4f2 --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Jorge_Librarian/Jorge#4_LinkToSources.md @@ -0,0 +1,56 @@ +--- +component-id: Jorge#4_LinkToSources +name: Librarian +persona: + - Jorge +keywords: + - help Jorge to link his library's scores to sources related to the musical opus + - compare / align external sources with a score +work-package: + - WP1 + - WP2 + - WP3 +pilot: + - FACETS +type: Story +story type: + - expert driven +related-components: +- persona: + - Jorge +- story: + - Jorge#1_OrganizeMyLibrary + - Jorge#3_DynamicExploration + - Jorge#2_FindSimilarScores +--- +# Link To Sources + +## Persona +Jorge + +## Goal + +Each Opus in Jorge's library features at least one score that gives a description, using music notation, of the Opus content. There exist other sources, often (but not always) that relate to the same Opus: scan of music scores (e.g., Gallica or IMSLP), textbooks or manuscripts, audio and video documents. +Jorge would like to refer to these source in order to, for instance, be able to give its visitors a link to listen to a music piece, or obtain additional information on the Opus, its historical context, its dissemination. + +## Scenario + +The user interface of Jorge's library shows mainly a rendering of the music score. It should be enriched with links that help to +get a comprehensive list of textual, visual, audio or video sources. The visitor could then be invited to listen a part of the displayed score, +or to look at a performance of the Opus by a specific artist. + +Ideally, there would exist an alignment of score fragments at a given granularity (e.g., measures) and corresponding fragments in a source. 
For instance, all measures in a piano sonata would be aligned with a temporal interval in a performance. One can imagine that a parallel rendering of both representations would be possible, +e.g., listening to the performance while highlighting at the same time the measure being played. + +## Competency questions + +CQ1: Can I link digital sources to an Opus ? + +CQ2: Can I refer to *fragments* of a score, e.g., the first 10 measures, or part B of a movement ? + +CQ3: Can I link fragments of sources and fragments of scores, such as for instance a measure in an Opus with a part of a recording ? + +## Resources + +The Web annotation ontology ? https://www.w3.org/ns/oa + diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Jorge_Librarian/readme.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Jorge_Librarian/readme.md new file mode 100644 index 00000000..7d5d7b62 --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Jorge_Librarian/readme.md @@ -0,0 +1,38 @@ +--- +component-id: Jorge +name: "Jorge" +description: "Jorge is managing a library of music scores, and is responsible for the sub-part of the library catalog which is already digitized." +type: Persona +long-title: "Jorge" +related-components: +- story: + - Jorge#1_OrganizeMyLibrary + - Jorge#2_FindSimilarScores + - Jorge#3_DynamicExploration + - Jorge#4_LinkToSources +project: polifonia-project +pilot: + - FACETS +--- + +# Jorge + +Jorge is managing a library of music scores, and is responsible for the sub-part of the library catalog which is already digitized. +A digitized score is available in one of the following formats: PDF/JPEG (resulting from a scan of the sheet score), or MusicXML/MEI (resulting from an OMR, transcription or direct manual input). + +## Name +Jorge + +## Age + + +## Occupation +- primary role + - Librarian + +## Knowledge/Skills +He is a librarian, with skills in digital libraries. 
+ +## Interests +He is involved in advanced technologies to promote the content of his library. + diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Keith_Music_Producer/Keith#1_MusicConnections.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Keith_Music_Producer/Keith#1_MusicConnections.md new file mode 100644 index 00000000..52af0887 --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Keith_Music_Producer/Keith#1_MusicConnections.md @@ -0,0 +1,53 @@ +--- +component-id: Keith#1_MusicConnections +name: Music Connections +type: Story +description: Keith wants to compile programmes of music, e.g. for music festivals. +related-components: + - persona: + - Keith +keywords: + - connections between music + - serendipitous discovery +work-package: + - WP5 +pilot: + - INTERLINK +story type: + - expert driven +--- +# Music Connections + +## Persona +Keith + +## Goal +Keith wants to compile programmes of music, e.g. for music festivals. This involves putting together related pieces of music; in some cases the relationship may be non-obvious. + +## Scenario +Keith uses music catalogues (see below), his own experiences of listening to music, and his intuition. + +## Competency questions + +CQ1: Can we find relations between different artists and music? + +CQ2: Can we find music which has a non-obvious relation to some other music? + +CQ3: Can we visualize the relations between different media, different music etc? + +## Resources + +Keith uses: + +Music catalogues, e.g. https://www.discogs.com/ and https://bandcamp.com/ + +Radio channels, e.g. https://www.nts.live/ + +Google trends, to analyse how often an artists is searched for on the Web + +Keith’s work could be helped by: + +Technology to find connections between different artists, media, music, video etc; these connections might sometimes be non-obvious. + +Technology to visualize these connections. 
+ diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Keith_Music_Producer/readme.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Keith_Music_Producer/readme.md new file mode 100644 index 00000000..73a393c9 --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Keith_Music_Producer/readme.md @@ -0,0 +1,33 @@ +--- +component-id: Keith +name: "Keith" +description: +type: Persona +long-title: "Keith" +related-components: + - story: + - Keith#1_MusicConnections +project: polifonia-project +pilot: + - INTERLINK +--- + +# Keith + +Event and music producer + +## Name + +Keith + +## Occupation + +Keith has a variety of roles, including researcher and lecturer at a university and communications director of a music festival. + +## Knowledge/Skills + +Keith’s original degree was in computer science, and he has a Masters degree in Communication and New Media. + +## Interests + +Keith is interested in creating programmes of music, e.g. for music festivals. diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Keoma_Architect/Keoma#1_RestorationAndSoundPractices.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Keoma_Architect/Keoma#1_RestorationAndSoundPractices.md new file mode 100644 index 00000000..9fa193e5 --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Keoma_Architect/Keoma#1_RestorationAndSoundPractices.md @@ -0,0 +1,167 @@ +--- +component-id: Keoma#1_RestorationAndSoundPractices +name: Restoration and Sound Practices +type: Story +description: Keoma needs to find different types of information and data from different sources. His goal is to interconnect data between architectures and their use for sound performances, in order to proceed with protection actions. 
+related-components: + - persona: + - Keoma +keywords: + - sound practices + - restoration +work-package: + - WP1 + - WP2 + - WP4 +pilot: + - BELLS +story type: + - expert driven +--- +# Restoration and Sound Practices + +## Persona + +Keoma is an architect and a restorer + +## Goal + +Keoma needs to find different types of information and data from different sources. His goal is to interconnect data between architectures and their use for sound performances, in order to proceed with protection actions. +- sound practices +- restoration + +## Scenario + +Keoma has to elaborate a restructuring plan for various bell towers and wants to know what kind of interventions to implement on the wall structure respecting the characteristics related to the use of the bell cell as a component of the production of the sound of the bells, and respecting traditional local sound practices. + +Keoma therefore needs to know what kind of sound practices are performed in each bell tower, starting with the names of the Churches given in a list. + +Keoma begins to search for the location of each church, also to understand if it is an urban, peri-urban or rural context. + +Keoma needs to know if in each of the churches the practices of sound are currently practiced by moving the bells manually or electrically. + +Furthermore, for static calculations he needs to know with which mounting system each bell is attached, its weight, its measurements and the material it is made of. + +If the sound techniques involve human beings, Keoma needs to understand what kind of tools (ropes, pestels) are used in the production of the sound and how many people are generally employed in the performance. + +Keoma also needs to know if there are sound documents that testify to the musical performances carried out in the different bell towers, to document the use of the bells in different events. 
+ +To do this, Keoma would have to resort to numerous searches: bibliographic sources, archival sources, field surveys and interviews with informed people. + +Thanks to Polifonia, Keoma can carry out these searches automatically, through a keyword search, retrieving a large amount of data in a short time. + +## Example data +The searched data can be divided into: Architectural data, Data related to the bells, Data related to sound practices, Data related to sound recordings, Bibliographic data. + +**Architectural data** + +The data related to each individual Italian church can be retrieved on the web from different sources, in particular Wikipedia, the General Catalog of Cultural Heritage, BeWeb, for example: + +- The Church of San Pietro di Sturla is located in Carasco, Genova Province. + +- The Church was built in 1619 + +- The Church is located in a periurban context + +**Data relating to the bells** + +The data relating to the bells can be searched on some websites of the sector such as Associazione Campanari Liguri, Associazione Italiana di Campanologia; in the specialized bibliography of which very few publications are online; in the General Catalog of Cultural Heritage. +For example: + +- The bells in the Church of San Pietro di Sturla are 5 (a “bell concert”) + +- The mounting system of the bells is the “falling clapper” (battaglio cadente) + +- The bells are all made of bronze and they were melted by Fonderia Regolo Capanni di Fidenza (PR) in 1929 + +- The bells have different nominal notes: REb3, MI b3, FA3, SOL b3, LA b3. The extension of the whole concert is an “intervallo di quinta” + +- The bells have different weights (…), and measures (…) + +**Data relating to sound practices and musical features** + +The data relating to the bells can be searched in specialized bibliography, of which very few publications are online. 
Data relating to the bells can be searched on some websites of the sector such as Associazione Italiana di Campanologia, Associazione Campanari Liguri etc, and in the general catalog of Cultural Heritage. +For example: + +- All the bells of San Pietro di Sturla, according to their mounting system, can be played “a distesa” (bell moving about 30°), “a bicchiere” (starting the movement with the bells upside down, at 180°), “a campane ferme” (the bells are not moving, only the clappers will be moved, producing sound). + +- All the bells of San Pietro di Sturla are played both electrically and manually + +- When played manually “a campane ferme”, the bells are played by the use of “cordette” (small chains connected with the arms and the legs of the bell ringers), a “tastiera” (keyboards) or manually moving the clappers. This characterizes the “sistema ligure”, together with other elements + +- Some repertoires/genres are typically associated with specific events. + +- One genre is the “Salmo alla Romana” + +- The “Salmo alla Romana” is usually associated with the day of a “Festa Patronale”, or the day before the “Festa Patronale”. + +- Sometimes the repertoire of bell ringers includes pieces taken from contemporary music. The execution of such songs is not connected to any particular event, but it is performed by the bell ringers for exhibition and fun. + +**Data relating to sound recordings** + +Recordings data can be searched in some portals for example Alphabetica and in Youtube, for audiovisuals containing bell concerts. +For example: + +- A concert played in San Pietro di Sturla for exhibition was recorded by ethnomusicologist Mauro Balma in 1994 + +- The concert is divided in 6 different “sonate” + +- One sonata is a “Salmo alla Romana” + +- The Salmo alla Romana was performed by Armando Rocca + +- Another sonata is from a contemporary repertoire. 
It is called “Marina” and it is taken from the song Marina Marina written by Rocco Granata in 1959 + +**Bibliographic data** + +The Bibliographic data can be searched by keywords in : Opac, Alphabetica, campanology Websites +For example: + +- The Sistema Ligure is described in the text “Campanari, campane e campanili di Liguria” By Mauro Balma, 1996 + +## Competency questions + +- CQ1: Where is the building? +- CQ2: When (what year) was the building built? +- CQ3: In which context is the building located (urban, periurban...)? +- CQ4: Are there bells in a church? +- CQ5: Which is the mounting system of the bell? +- CQ6: Which is the material of the bell? +- CQ7: By whom (by which foundry) were they cast? +- CQ8: In which year were they cast? +- CQ9: Which is the nominal/fundamental note of the bell? +- CQ10: Which is the extension of the whole set of bells in a bell tower? +- CQ11: Which is the weight of the bell? +- CQ12: Which are the measures of the bell? +- CQ13: What kind of execution techniques are possible to perform according to a specific mounting system? +- CQ14: Can a set of bells be played electrically, manually, or both? +- CQ15: How/Using which tools is the set of bells played, when played manually? +- CQ16: Where is the church/bell tower? +- CQ17: How many bells are in the church? +- CQ18: Which is the mouth diameter of the bell? +- CQ19: Which is the sound bow of the bell? +- CQ20: Using which execution technique(s) is the set of bells played, when played manually? + +**old** + +CQ2: What kind of repertoire is traditionally performed in that specific location? + +CQ3: What is the dating that accompanies this repertoire? + +CQ4: Is it a repertoire performed in the contemporary world? + +CQ5: Is it a repertoire in that place currently performed by hand or by electric means? + +CQ6: How many people are generally employed for the performance of these sound practices? 
+ +CQ7: Are there significant variations in the frequencies between the manually produced sound and the electric sound, such as to have repercussions on the soundscape? + +CQ8: Are there historical evidences of the presence of that sound in the surrounding area? + +## Resources +- General Catalog of Cultural Heritage: https://catalogo.beniculturali.it/ +- WikiPedia: https://it.wikipedia.org/wiki/Pagina_principale +- BeWeb: https://beweb.chiesacattolica.it/?l=it_IT +- Opac-SBN: https://opac.sbn.it/ +- Alphabetica: https://alphabetica.it/web/alphabetica/ +- Campanology websites: https://campanologia.org/ ; https://www.campanologia.it/; https://www.campanariliguri.it/; diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Keoma_Architect/readme.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Keoma_Architect/readme.md new file mode 100644 index 00000000..92667b4a --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Keoma_Architect/readme.md @@ -0,0 +1,36 @@ +--- +component-id: Keoma +name: "Keoma" +description: Architect and Restorer +type: Persona +long-title: "Keoma" +related-components: + - story: + - Keoma#1_RestorationAndSoundPractices +project: polifonia-project +pilot: + - BELLS +--- + +# Keoma + +This is a description of the persona Keoma + +## Name +Keoma + +## Age + + +## Occupation +- primary role + - Architect +- secondary roles + - Restorer + +## Knowledge/Skills +He is an architect and a restorer. + +## Interests +He is involved in the cultural heritage protection. 
+ diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/LICENSE.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/LICENSE.md new file mode 100644 index 00000000..f8a5e157 --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/LICENSE.md @@ -0,0 +1,96 @@ +Creative Commons Attribution 4.0 International Public License +https://creativecommons.org/licenses/by/4.0/legalcode + +By exercising the Licensed Rights (defined below), You accept and agree to be bound by the terms and conditions of this Creative Commons Attribution 4.0 International Public License ("Public License"). To the extent this Public License may be interpreted as a contract, You are granted the Licensed Rights in consideration of Your acceptance of these terms and conditions, and the Licensor grants You such rights in consideration of benefits the Licensor receives from making the Licensed Material available under these terms and conditions. + +Section 1 – Definitions. + + Adapted Material means material subject to Copyright and Similar Rights that is derived from or based upon the Licensed Material and in which the Licensed Material is translated, altered, arranged, transformed, or otherwise modified in a manner requiring permission under the Copyright and Similar Rights held by the Licensor. For purposes of this Public License, where the Licensed Material is a musical work, performance, or sound recording, Adapted Material is always produced where the Licensed Material is synched in timed relation with a moving image. + Adapter's License means the license You apply to Your Copyright and Similar Rights in Your contributions to Adapted Material in accordance with the terms and conditions of this Public License. 
+ Copyright and Similar Rights means copyright and/or similar rights closely related to copyright including, without limitation, performance, broadcast, sound recording, and Sui Generis Database Rights, without regard to how the rights are labeled or categorized. For purposes of this Public License, the rights specified in Section 2(b)(1)-(2) are not Copyright and Similar Rights. + Effective Technological Measures means those measures that, in the absence of proper authority, may not be circumvented under laws fulfilling obligations under Article 11 of the WIPO Copyright Treaty adopted on December 20, 1996, and/or similar international agreements. + Exceptions and Limitations means fair use, fair dealing, and/or any other exception or limitation to Copyright and Similar Rights that applies to Your use of the Licensed Material. + Licensed Material means the artistic or literary work, database, or other material to which the Licensor applied this Public License. + Licensed Rights means the rights granted to You subject to the terms and conditions of this Public License, which are limited to all Copyright and Similar Rights that apply to Your use of the Licensed Material and that the Licensor has authority to license. + Licensor means the individual(s) or entity(ies) granting rights under this Public License. + Share means to provide material to the public by any means or process that requires permission under the Licensed Rights, such as reproduction, public display, public performance, distribution, dissemination, communication, or importation, and to make material available to the public including in ways that members of the public may access the material from a place and at a time individually chosen by them. 
+ Sui Generis Database Rights means rights other than copyright resulting from Directive 96/9/EC of the European Parliament and of the Council of 11 March 1996 on the legal protection of databases, as amended and/or succeeded, as well as other essentially equivalent rights anywhere in the world. + You means the individual or entity exercising the Licensed Rights under this Public License. Your has a corresponding meaning. + +Section 2 – Scope. + + License grant. + Subject to the terms and conditions of this Public License, the Licensor hereby grants You a worldwide, royalty-free, non-sublicensable, non-exclusive, irrevocable license to exercise the Licensed Rights in the Licensed Material to: + reproduce and Share the Licensed Material, in whole or in part; and + produce, reproduce, and Share Adapted Material. + Exceptions and Limitations. For the avoidance of doubt, where Exceptions and Limitations apply to Your use, this Public License does not apply, and You do not need to comply with its terms and conditions. + Term. The term of this Public License is specified in Section 6(a). + Media and formats; technical modifications allowed. The Licensor authorizes You to exercise the Licensed Rights in all media and formats whether now known or hereafter created, and to make technical modifications necessary to do so. The Licensor waives and/or agrees not to assert any right or authority to forbid You from making technical modifications necessary to exercise the Licensed Rights, including technical modifications necessary to circumvent Effective Technological Measures. For purposes of this Public License, simply making modifications authorized by this Section 2(a)(4) never produces Adapted Material. + Downstream recipients. + Offer from the Licensor – Licensed Material. Every recipient of the Licensed Material automatically receives an offer from the Licensor to exercise the Licensed Rights under the terms and conditions of this Public License. 
+ No downstream restrictions. You may not offer or impose any additional or different terms or conditions on, or apply any Effective Technological Measures to, the Licensed Material if doing so restricts exercise of the Licensed Rights by any recipient of the Licensed Material. + No endorsement. Nothing in this Public License constitutes or may be construed as permission to assert or imply that You are, or that Your use of the Licensed Material is, connected with, or sponsored, endorsed, or granted official status by, the Licensor or others designated to receive attribution as provided in Section 3(a)(1)(A)(i). + + Other rights. + Moral rights, such as the right of integrity, are not licensed under this Public License, nor are publicity, privacy, and/or other similar personality rights; however, to the extent possible, the Licensor waives and/or agrees not to assert any such rights held by the Licensor to the limited extent necessary to allow You to exercise the Licensed Rights, but not otherwise. + Patent and trademark rights are not licensed under this Public License. + To the extent possible, the Licensor waives any right to collect royalties from You for the exercise of the Licensed Rights, whether directly or through a collecting society under any voluntary or waivable statutory or compulsory licensing scheme. In all other cases the Licensor expressly reserves any right to collect such royalties. + +Section 3 – License Conditions. + +Your exercise of the Licensed Rights is expressly made subject to the following conditions. + + Attribution. 
+ + If You Share the Licensed Material (including in modified form), You must: + retain the following if it is supplied by the Licensor with the Licensed Material: + identification of the creator(s) of the Licensed Material and any others designated to receive attribution, in any reasonable manner requested by the Licensor (including by pseudonym if designated); + a copyright notice; + a notice that refers to this Public License; + a notice that refers to the disclaimer of warranties; + a URI or hyperlink to the Licensed Material to the extent reasonably practicable; + indicate if You modified the Licensed Material and retain an indication of any previous modifications; and + indicate the Licensed Material is licensed under this Public License, and include the text of, or the URI or hyperlink to, this Public License. + You may satisfy the conditions in Section 3(a)(1) in any reasonable manner based on the medium, means, and context in which You Share the Licensed Material. For example, it may be reasonable to satisfy the conditions by providing a URI or hyperlink to a resource that includes the required information. + If requested by the Licensor, You must remove any of the information required by Section 3(a)(1)(A) to the extent reasonably practicable. + If You Share Adapted Material You produce, the Adapter's License You apply must not prevent recipients of the Adapted Material from complying with this Public License. + +Section 4 – Sui Generis Database Rights. 
+ +Where the Licensed Rights include Sui Generis Database Rights that apply to Your use of the Licensed Material: + + for the avoidance of doubt, Section 2(a)(1) grants You the right to extract, reuse, reproduce, and Share all or a substantial portion of the contents of the database; + if You include all or a substantial portion of the database contents in a database in which You have Sui Generis Database Rights, then the database in which You have Sui Generis Database Rights (but not its individual contents) is Adapted Material; and + You must comply with the conditions in Section 3(a) if You Share all or a substantial portion of the contents of the database. + +For the avoidance of doubt, this Section 4 supplements and does not replace Your obligations under this Public License where the Licensed Rights include other Copyright and Similar Rights. + +Section 5 – Disclaimer of Warranties and Limitation of Liability. + + Unless otherwise separately undertaken by the Licensor, to the extent possible, the Licensor offers the Licensed Material as-is and as-available, and makes no representations or warranties of any kind concerning the Licensed Material, whether express, implied, statutory, or other. This includes, without limitation, warranties of title, merchantability, fitness for a particular purpose, non-infringement, absence of latent or other defects, accuracy, or the presence or absence of errors, whether or not known or discoverable. Where disclaimers of warranties are not allowed in full or in part, this disclaimer may not apply to You. 
+ To the extent possible, in no event will the Licensor be liable to You on any legal theory (including, without limitation, negligence) or otherwise for any direct, special, indirect, incidental, consequential, punitive, exemplary, or other losses, costs, expenses, or damages arising out of this Public License or use of the Licensed Material, even if the Licensor has been advised of the possibility of such losses, costs, expenses, or damages. Where a limitation of liability is not allowed in full or in part, this limitation may not apply to You. + + The disclaimer of warranties and limitation of liability provided above shall be interpreted in a manner that, to the extent possible, most closely approximates an absolute disclaimer and waiver of all liability. + +Section 6 – Term and Termination. + + This Public License applies for the term of the Copyright and Similar Rights licensed here. However, if You fail to comply with this Public License, then Your rights under this Public License terminate automatically. + + Where Your right to use the Licensed Material has terminated under Section 6(a), it reinstates: + automatically as of the date the violation is cured, provided it is cured within 30 days of Your discovery of the violation; or + upon express reinstatement by the Licensor. + For the avoidance of doubt, this Section 6(b) does not affect any right the Licensor may have to seek remedies for Your violations of this Public License. + For the avoidance of doubt, the Licensor may also offer the Licensed Material under separate terms or conditions or stop distributing the Licensed Material at any time; however, doing so will not terminate this Public License. + Sections 1, 5, 6, 7, and 8 survive termination of this Public License. + +Section 7 – Other Terms and Conditions. + + The Licensor shall not be bound by any additional or different terms or conditions communicated by You unless expressly agreed. 
+ Any arrangements, understandings, or agreements regarding the Licensed Material not stated herein are separate from and independent of the terms and conditions of this Public License. + +Section 8 – Interpretation. + + For the avoidance of doubt, this Public License does not, and shall not be interpreted to, reduce, limit, restrict, or impose conditions on any use of the Licensed Material that could lawfully be made without permission under this Public License. + To the extent possible, if any provision of this Public License is deemed unenforceable, it shall be automatically reformed to the minimum extent necessary to make it enforceable. If the provision cannot be reformed, it shall be severed from this Public License without affecting the enforceability of the remaining terms and conditions. + No term or condition of this Public License will be waived and no failure to comply consented to unless expressly agreed to by the Licensor. + Nothing in this Public License constitutes or may be interpreted as a limitation upon, or waiver of, any privileges and immunities that apply to the Licensor or You, including from the legal processes of any jurisdiction or authority. diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Laurent_Music_Journalist/Laurent#1_MusicArchives.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Laurent_Music_Journalist/Laurent#1_MusicArchives.md new file mode 100644 index 00000000..c5f85a83 --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Laurent_Music_Journalist/Laurent#1_MusicArchives.md @@ -0,0 +1,54 @@ +--- +component-id: Laurent#1_MusicArchives +name: Music Archives +type: Story +description: Laurent's goal is to discover and explore Archives, Historical and Research Resources that deal with music in some way, for the extrapolation of content to be included in his newsletter. 
+related-components: + - persona: + - Laurent + - musoW +keywords: + - music archives + - historical resources +work-package: + - -- +pilot: [] +story type: + - expert driven +--- +# Music Archives + +## Persona +Laurent is a music journalist. He produces a newsletter called Music Journalism Insider. + +## Goal +Laurent's goal is to discover and explore Archives, Historical and Research Resources that deal with music in some way, for the extrapolation of content to be included in his newsletter. + + +## Scenario +Every week, Laurent highlights some of the best stuff he hears, reads, and watches; publishes news about the industry; and interviews writers, scholars, and editors about their work. He wants to share knowledge, celebrate great work, and expand the idea of what music journalism is — and where it happens. He uses music catalogues (see below), his own experiences of listening to music, and his inspiration. + + +## Competency questions + +CQ1: Can I search for a musical content by applying filters (genre, historical period ...)? + +CQ2: What types of resources can I find? + +CQ3: Is the music resource X complete or incomplete? + +CQ4: Is a dataset attached to resource X? + +CQ5: Can I add resources as a user? + +CQ6: How can I share what I find on the site? + + +## Resources +- Laurent uses: + - Music catalogues, e.g. http://projects.dharc.unibo.it/musow/ (musoW) + - Music Archives, Online Resources, Google Trends +- Laurent could be helped by: + - a technology to find and easily explore archival & historical music resources; + - a technology to seek for relations and connections between music contents; + - a technology to visualize the above-mentioned contents. 
diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Laurent_Music_Journalist/readme.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Laurent_Music_Journalist/readme.md new file mode 100644 index 00000000..9de0bcad --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Laurent_Music_Journalist/readme.md @@ -0,0 +1,33 @@ +--- +component-id: Laurent +name: "Laurent" +description: "Music Journalist and Editor." +type: Persona +long-title: "Laurent" +related-components: + - story: + - Laurent#1_MusicArchives +project: polifonia-project +pilot: [] +--- + +# Laurent +This is a description of the persona Laurent. + +## Name +Laurent + +## Age +47 + +## Occupation +- Primary role + - Music Journalist +- Secondary roles + - Editor + +## Knowledge/Skills +Laurent has a BA in History | Art History. He has always been passionate about music and is now an established music journalist. He is former EIC of several editorial companies that deal with music content and now manages Music Journalism Insider, a newsletter in which he highlights some of the best music he listens to, reads or discovers. He publishes news about the industry and interviews writers, scholars, and editors about their work. + +## Interests +Laurent is primarily interested in discovering and exploring new musical content that he can include in his newsletter and share with his readers. 
diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Linka_Computer_Scientist/Linka#1_MusicKnowledge.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Linka_Computer_Scientist/Linka#1_MusicKnowledge.md new file mode 100644 index 00000000..ddad17b2 --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Linka_Computer_Scientist/Linka#1_MusicKnowledge.md @@ -0,0 +1,102 @@ +--- +component-id: Linka#1_MusicKnowledge +name: Music Knowledge +type: Story +description: Linka is interested in finding and describing music-related data of heterogeneous types and formats from different sources. Thanks to the ability to represent diverse content, her main goal is to inter-relate music data so as to enable large scale experiments for knowledge discovery. +related-components: + - persona: + - Linka +keywords: + - multi-modality + - music Data +work-package: + - WP2 +pilot: + - INTERLINK +story type: + - pilot driven +--- +# Music Knowledge + +## Persona + +Linka is a researcher in Computer Science. + +## Goal +*Keywords: music data, multi-modal data, knowledge discovery* + +Linka is interested in finding and describing music-related data of heterogeneous types and formats from different sources. Thanks to the ability to represent diverse content, her main goal is to inter-relate music data so as to enable large scale experiments for knowledge discovery. + + +## Scenario + +Linka is carrying out a lot of manual work to find music data on the Web from different datasets, catalogues, platforms and services. In addition, all this data needs to be carefully inspected so that information retrieved from a certain source is matched to that of another source, in a consistent manner. Although this can be done for a few musical objects of interest (e.g. 
information related to an album), the analytical and manual process becomes unmanageable and tremendously time-consuming when the scope of the analysis increases (e.g. hundreds of songs). In turn, this jeopardises the possibility of conducting large scale computational studies, which are fundamental for knowledge discovery -- establishing links among musical objects and entities to unveil novel relationships[^1]. + +Thanks to Polifonia, Linka will be able to perform all the aforementioned operations in an automatic manner and with little supervision. Starting from some desiderata -- names of artists, composers, musical pieces, albums -- the system will attempt to find pertinent music-related data from a specific set of resources (see below). Besides the computational requirements of the system for finding relevant content, this requires to semantically describe the retrieved data. + + + +## Example Data + +For simplicity, music-related data can be categorised as: cataloguing information, musical features, musical facts, and musical relationships. + +**Cataloguing information** + +Information needed to identify a musical work as well as its relationships with the recordings that are derived from it. For example: +- "Penny Lane" is a musical work by "The Beatles", +- "Penny Lane" was composed by "John Lennon" and "Paul McCartney"; +- The first recording of "Penny Lane" was released in February 1967 as a double A-side single with "Strawberry Fields Forever". + +Linka is interested in tracing the basic discographic information associated to the recordings of a certain musical work, including: the year of release, the name of the release, the place of recording, the instruments/performers relationships, and the vocals. Example data to address this category can be found from [MusicBrainz] and [Wikidata]. 
+ +- The specific track of "Penny Lane" featured in the album "Strawberry Fields Forever" was recorded in "Abbey Road Studios: Studio 2" in "St John's Wood, Westminster, London (UK)" on 1966-12-29, 1967-01-05, 1967-01-06, 1967-01-09, and 1967-01-17. +- The album "Strawberry Fields Forever" has been originally released in the UK on 1967-02-17. +- The background and lead vocals of "Penny Lane" are by "Paul McCartney". +- The "tambourine" is played by "Ringo Starr". + + +**Musical features** + +Musical features can range from simple tags associated with a musical work or a recording (e.g. genre, style) to the tonality, tempo, and lyrics of the piece. Music tags are generally provided by all databases mentioned in the Resources section below; tonality and tempo are given by [Wikidata] and [MusicBrainz] through [AcousticBrainz][-2]; lyrics are provided by [Genius] and [SongFacts]. Please, note that Linka aims at modelling lyrics at the sentence level (bottom level) and also at the sectional level (top level). This last consideration is not fully captured by the example reported below, although the data provided by [Genius] and [SongFacts] allows for this organisation. + +- The musical work "Penny Lane" has genres/style "baroque pop" and "psychedelic pop". +- The "original mono studio mix" of "Penny Lane" featured in "Strawberry Fields Forever" is in B minor (tonality = key + scale) and its tempo is 115 BPM. +- The (original) lyrics of "Penny Lane" are "In Penny Lane there is a barber showing photographs + "\n" + Of every head he's had the pleasure to know ..." + +**Musical facts** + +These are usually considered as textual explanations of the lyrics (e.g. "Four of fish and finger pies" in "Penny Lane" is explained by Genius as a "common order size for fish and chips, that is what you could get with four pence..."), curiosities on the composition process (e.g. 
"The piquant trumpet part was added after the rest of the song was finished."), pertinent artist-related facts (e.g. "Lennon's mother Julia at one time worked as a cinema usher and a waitress in Penny Lane."), but also relationships to places and their meaning in the lyrics (e.g. "Penny Lane, Liverpool, England"). These can be found from [Genius] and [SongFacts]. + + +**Musical relationships** + +Establishing connections among pieces and recordings based on objective and/or subjective criteria and musical properties. These include derivative works (e.g. covers, medleys, mesh-ups), and sampling relationships where a certain musical pattern -- originally set/induced in a musical work, is found in other pieces. Examples of the former can be found from [SecondHandSongs] and [MusicBrainz], whereas [WhoSampled] provides data for the latter. + +- "Penny Lane" is referred to in the following medleys: "A Liverpool Day", "She's leaving home / Penny Lane" and "The Music of the Beatles". +- "Penny Lane" was translated in Finnish with "Rööperiin". +- An instrumental version of "Penny Lane" was released by Al Di Meola in September 2013. +- "Penny Lane" was covered by "Kai Warner" in 1976. +- "Penny Lane" was sampled by "The Rutles" in "Doubleback Alley". The sample appears at 1:16, 1:35, 1:43, and 2:35, corresponding to 1:12, 1:27, 1:33, and 2:41 in the original performance. + +## Data resources + +An example of music-related data for the same song (Penny Lane) is given below. 
+ +* MusicBrainz: https://musicbrainz.org/work/1548c8c0-108f-33a8-9671-a025aa68e382 +* Wikidata: https://www.wikidata.org/wiki/Q842168 +* Genius: https://genius.com/1376466 +* SongFacts: https://www.songfacts.com/facts/the-beatles/penny-lane +* SecondHandSongs: https://secondhandsongs.com/work/8937/ +* WhoSampled: https://www.whosampled.com/The-Beatles/Penny-Lane/ + +[MusicBrainz]: https://musicbrainz.org/work/1548c8c0-108f-33a8-9671-a025aa68e382 +[Wikidata]: https://www.wikidata.org/wiki/Q842168 +[Genius]: https://genius.com/1376466 +[SongFacts]: https://www.songfacts.com/facts/the-beatles/penny-lane +[SecondHandSongs]: https://secondhandsongs.com/work/8937/ +[WhoSampled]: https://www.whosampled.com/The-Beatles/Penny-Lane/ + + +[^1] This use case is the specific scope of another story: Linka#2 (coming soon). + +[-2] AcousticBrainz will be shut down (news from Mid-Feb 2022). diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Linka_Computer_Scientist/readme.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Linka_Computer_Scientist/readme.md new file mode 100644 index 00000000..5c2d0eb0 --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Linka_Computer_Scientist/readme.md @@ -0,0 +1,33 @@ +--- +component-id: Linka +name: "Linka" +description: Lecturer and Researcher in Computer Science. +type: Persona +long-title: "Linka" +related-components: + - story: + - Linka#1_MusicKnowledge +project: polifonia-project +pilot: + - INTERLINK +--- + +# Linka +Lecturer in Music Technology + + +## Name +Linka + +## Age +34 + +## Occupation +- primary role + - Researcher in Computer Science + +## Knowledge/Skills +She is a specialist in Network Data Analysis and Semantic Web technologies, especially in the music domain. + +## Interests +Her goal is to represent and combine musical knowledge from different sources to perform Knowledge Discovery. 
Therefore, Linka's main interest is mining music knowledge graphs to unveil unknown relationships between artists, composers, and musical pieces. diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Mark_Computational_Musicologist/Mark#1_FolkMusic.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Mark_Computational_Musicologist/Mark#1_FolkMusic.md new file mode 100644 index 00000000..e39f97f9 --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Mark_Computational_Musicologist/Mark#1_FolkMusic.md @@ -0,0 +1,157 @@ +--- +component-id: Mark#1_FolkMusic +name: Folk Music +type: Story +description: Mark is interested in understanding how Dutch folk tunes relate to other music, e.g. French court operas. +related-components: + - persona: + - Mark +keywords: + - folk music +work-package: + - WP2 +pilot: + - TUNES +story type: + - expert driven +--- + + +# Folk Music + +## Persona + +Mark + +## Goal + +Mark is interested in understanding how Dutch folk tunes relate to other music, e.g. from French court operas. + +## Scenario + +Starting from a collection of Dutch folk tunes, Mark attempts to relate these tunes to other documented music, using a variety of databases. Mark would like to relate individual pieces of music, but also repertoires generally. He would also like to understand musical evolution and transmission over time. + +## Competency questions + +CQ1: Is a composer known for composition X? + +CQ2: What is the name of the composer specified in the source of this composition? + +CQ3: What is the similarity between compositions X and Y given similarity measure Z? + +CQ4: Which tunes are similar to tune X given similarity measure Y? + +CQ5: Who (which source) attributed composition X to composer Y? + +CQ6: Which are all known concordances (same composition/tune in another source)? + +CQ7: Which concordances of composition X have a composer name associated? 
 + +CQ8: What is the geographic origin of source X? + +CQ9: Who were the owners of (manuscript) source X? + +CQ10: Who was/were the scribe(s) of (manuscript) source X? + +CQ11: What was the repertoire of scribe X (i.e. all compositions written down by X)? + +CQ12: Who was the publisher of (printed) source X? + +CQ13: What is the publication year of printed source X? + +CQ14: Which are all compositions that are in source X? + +CQ15: What is the current location of source X? + +CQ16: Where to find a digital scan of source X (url)? + +CQ17: What is the title of composition X in source Y? + +CQ18: What is the tune indication of composition X in source Y? + +CQ19: On what page (or folio) is composition X in source Y? + +CQ20: What is the serial number of composition X in source Y? + +CQ21: What printed source shares content with manuscript X? + +CQ22: What is the language of the lyrics of tune X? + +CQ23: Has composition X been identified as a variant in a tune family? + +CQ24: Which tune family does composition X belong to? + +CQ25: Who assigned composition X to tune family Y? + +CQ26: With what level of confidence is composition X a variant in tune family Y? + +CQ27: What are all compositions in tune family X? + +CQ28: What are the similarities / differences of all compositions in tune family X according to measure Y? + +CQ29: To what tune families is tune family X related, given similarity measure Y? + +CQ30: What are alternative titles for composition X? + +CQ31: What are the differences / similarities between two corpora of compositions concerning features Y1..Yn? + +CQ32: What are longitudinal differences / similarities within a corpus concerning features Y1..Yn? + +CQ33: What are the differences / similarities between two corpora of compositions concerning occurrences of patterns? + +CQ34: What are longitudinal differences / similarities within a corpus concerning occurrences of patterns? + +CQ35: What patterns do the compositions in corpus X share? 
 + +CQ36: What patterns are overrepresented in corpus X compared to corpus Y? + +## Resources + +Mark works with: + +RISM index of musical sources, composers etc https://opac.rism.info/main-menu-/kachelmenu with the ability to search on names and music + +NEUMA, a digital library of musical scores http://neuma.huma-num.fr/home/presentation + +ABC notation database http://abcnotation.com/ + +Database of Dutch tunes http://www.liederenbank.nl and http://www.liederenbank.nl/mtc + +Early American Secular Music and its European sources http://www.cdss.org/elibrary/Easmes/Index.htm + +Mark’s work could be helped by: + +Connection to other databases, so that he could automatically query a range of databases + +## Remarks + +Requirements for UI: +- visualize changes in musical style over time, e.g. transition from modal to tonal (evolution curves - see Weiss et al. 2018 ‘Investigating style evolution of Western classical music: A computational approach’) - with zooming +- visualize interconnections, e.g. of tunes which share melodic patterns or geographical origin. +- visualize the circulation of certain repertoires (e.g. Italian opera). + +Previous set of CQs. These are quite abstract, but not yet all covered by the set of CQs in the section above. + +CQ1: Can we identify a tune (e.g. from an oral tradition) in our collection with music in another documented collection, e.g. RISM, NEUMA, ABC + +CQ2: Can we trace the origin of a melody (composer)? + +CQ2: Can we compare music from different collections, e.g. from different countries to show connections/influences between musical styles? + +CQ3: Can we compare music longitudinally, e.g. to see evolution of tonality and transition from modal to tonal? Working either at manuscript or collection level. + +CQ4: Can we visualize changes in musical style over time, e.g. transition from modal to tonal (evolution curves - see Weiss et al. 
2018 ‘Investigating style evolution of Western classical music: A computational approach’) - with zooming + +CQ5: Can we visualize interconnections, e.g. of tunes which share melodic patterns or geographical origin? + +CQ6: Can we link repertoires by scribes? + +CQ7: Do 'national' repertoires have specific characteristics in terms of frequent patterns? + +CQ8: How does translation (of lyrics) affect variation in the melody? + +CQ9: Which aspects of melody vary and which remain stable in the process of transmission and adaptation? + +CQ10: How did certain repertoires (e.g., Italian opera) circulate? + +CQ11: What are the connections between printed sources and manuscripts? (manuscripts could be copies of printed sources) \ No newline at end of file diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Mark_Computational_Musicologist/readme.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Mark_Computational_Musicologist/readme.md new file mode 100644 index 00000000..162848ac --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Mark_Computational_Musicologist/readme.md @@ -0,0 +1,33 @@ +--- +component-id: Mark +name: "Mark" +description: Computational Musicologist +type: Persona +long-title: "Mark" +related-components: + - story: + - Mark#1_FolkMusic +project: polifonia-project +pilot: + - TUNES +--- + +# Mark + +A computational musicologist working in the music department of a university. + +## Name + +Mark + +## Occupation + +Researcher and lecturer at a university + +## Knowledge/Skills + +Mark’s original degree was in electrical engineering, and he has an MA and Ph.D. in musicology. + +## Interests + +Mark is interested in using computational methods to understand oral music traditions. 
diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Ortenz_Music_Historian/Ortenz#1_MusicAndChildhood.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Ortenz_Music_Historian/Ortenz#1_MusicAndChildhood.md new file mode 100644 index 00000000..e08a175f --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Ortenz_Music_Historian/Ortenz#1_MusicAndChildhood.md @@ -0,0 +1,72 @@ +--- +component-id: Ortenz#1_MusicAndChildhood +name: Music and Childhood +type: Story +description: "Ortenz is planning to write an article about the role of music in children education as a means to develop national identity." +related-components: + - persona: + - Ortenz +keywords: + - music history + - national identity + - childhood + - text analysis + - art interpretation +work-package: + - WP1 +pilot: + - CHILD +story type: + - expert driven +--- +# Music and Childhood + +## Persona + +Ortenz is a Music historian with a background in art history and literature. + +## Goal + +Ortenz is planning to write an article about the role of music in children education as a means to develop national identity. + +## Scenario + +Ortenz wants to characterize children’s experience of music as witnessed in bibliographic and artistic sources. She is looking for primary sources (e.g. Personal journals, literary texts) wherein to find evidence of listening experiences. + +She needs to collect and analyze large corpora of texts and images recording or depicting children’s experience with music. Documents include official sources (e.g. newspaper articles, reviews of concerts, paintings) and sources produced by “ordinary people”. She prefers the latter as they provide more reliable feedback, and she looks at the context of production of such sources (where, when, who created the source, the goal, which related events exist), contents (recurring motifs and themes), and elicited emotional responses. 
She collects sources belonging to different historical periods so as to characterize the development of identified phenomena. + +## Competency questions + +CQ1. What is the difference between the ‘official’ perception of the role of music and how music is experienced? + + * CQ1.1 What are the aspects that characterize an official source? E.g. context of production, creator’s role, relation between the source and political or national events + * CQ1.2 What are the official sources? What are the sources produced by ordinary people? + * CQ1.3 What are the aspects that characterize the source at hand as trustworthy? E.g. context of production, creator’s occupation + * CQ1.4 What are the goals of the source? + * CQ1.5 What is the emotional response of music listeners/authors in both types of sources? + +CQ2. How is music used to teach children about identity and heritage? + + * CQ2.1 In which source there is evidence of children as target audience and music as subject? + * CQ2.2 What are the subjects of the source? + * CQ2.3 What are the subjects that are related to national identity? + * CQ2.4 What are the subjects that are related to national heritage? + * CQ2.5 When was the source produced? + * CQ2.6 Where was the source produced? + * CQ2.7 Who produced the source? + * CQ2.8 What type of source is it? E.g. Literary text, painting + * CQ2.9 What is the context of production of the source? E.g. academic, official source, personal or primary source, commission to an artist + * CQ2.10 Are there related events connected to the production of the source? E.g. national fests, war period, political or historical events + * CQ2.11 What is context of usage of the source? E.g. propaganda, personal account, review of an event, teaching + * CQ2.12 How subjects/goals/contexts of usage change over time? + +CQ3. What is the adult perception of the role of music in children’s education? + + * CQ3.1 What is the goal of the official sources? 
+ * CQ3.2 To what extent are music subjects relevant (among the others) in official sources targeted to children? + + +## Resources + + * The [LED database](http://www.listeningexperience.org/) + * (online) catalogues of artworks diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Ortenz_Music_Historian/Ortenz#2_MusicalSocialNetwork.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Ortenz_Music_Historian/Ortenz#2_MusicalSocialNetwork.md new file mode 100644 index 00000000..335595ff --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Ortenz_Music_Historian/Ortenz#2_MusicalSocialNetwork.md @@ -0,0 +1,78 @@ +--- +component-id: Ortenz#2_MusicalSocialNetwork +name: Musical Social Network +type: Story +description: "Ortenz would like to have a system for visualising events (meetings of composers and musicians) in time and space in + order to track musicians' careers, their overlap and intersections, gathering trends in time and space, and making + emerge patterns of knowledge transmission." +related-components: + - persona: + - Ortenz +keywords: + - music history + - events + - encounters + - text analysis + - annotation + - collaboration +work-package: + - WP1 + - WP4 + - WP2 +pilot: + - MEETUPS +story type: + - expert driven +--- +# Musical Social Network + +## Persona + +Ortenz is a Music historian with a background in art history and literature. + +## Goal + +Ortenz would like to have a system for visualising events (meetings of composers and musicians) in time and space in +order to track musicians' careers, their overlap and intersections, gathering trends in time and space, and making +emerge patterns of knowledge transmission. + +## Scenario + +Ortenz is supervising a PhD student. 
+They want to explore a database of prosopographic information of personalities relevant to the musical cultural heritage, focusing primarily on +musicians' careers but also involving relevant people in sectors such as art, politics, and industry. +They are interested in the events and facts and how they are linked to the sources (biographies, letters, memoirs, encyclopedia). +She wants the system to allow her to make annotations on the content, rate the quality of the sources and the accuracy of the statements, +and curate collections of facts/statements/events as material for scholarship. + +## Competency questions + +CQ1: What places did musician Z visit in her career? + +* Where did she perform? +* Where did she live? + +CQ2: Did musician X and performer Y ever meet? Where, when, and why? + +CQ3: In what context did the meeting happen? + +* What is the nature of the event? +* Was it a celebration, a festival, a private event? +* Was it a religious or a secular event? +* Who paid to support the event? + +CQ4: What is the provenance of the event attendees? Why and how did they happen to be there? + +* Did they travel to reach the place? +* Were they invited? Was the meeting accidental? + +CQ5: How can we characterize the relation among the participants? + +* Was there a power relation? (e.g. 
Patron / Musician) + + + +## Resources +- Wikipedia +- DBpedia +- Wikidata diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Ortenz_Music_Historian/readme.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Ortenz_Music_Historian/readme.md new file mode 100644 index 00000000..03c3ee69 --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Ortenz_Music_Historian/readme.md @@ -0,0 +1,38 @@ +--- +component-id: Ortenz +name: "Ortenz" +description: Music, Art and Cultural Historian +type: Persona +long-title: "Ortenz" +related-components: + - Ortenz#1_MusicAndChildhood + - Ortenz#2_MusicalSocialNetwork +project: polifonia-project +pilot: + - MEETUPS + - CHILD +--- + +# Ortenz + +This is a description of the persona Ortenz. +The persona is relevant to pilots #CHILD and #MEETUPS. + +## Name +Ortenz + +## Age +34 + +## Occupation + + * Primary role: Music historian + * Secondary role: + * Art historian + * Cultural historian + +## Knowledge/Skills +She is a research fellow at the Music Department. Her background is in literature and art history. She researches historical accounts of people’s experiences of listening to music, with a particular interest in the listening experiences of 'ordinary people'. + +## Interests +She is looking for bibliographic evidence relating music, national identity, and childhood. 
diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Patrizia_Ethnoanthropologist/Patrizia#1_IdentificationOfIntangibleElements.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Patrizia_Ethnoanthropologist/Patrizia#1_IdentificationOfIntangibleElements.md new file mode 100644 index 00000000..b78eac32 --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Patrizia_Ethnoanthropologist/Patrizia#1_IdentificationOfIntangibleElements.md @@ -0,0 +1,49 @@ +--- +component-id: Patrizia#1_IdentificationOfIntangibleElements +name: Identification of Intangible Elements +type: Story +description: Patrizia must propose safeguarding actions on some intangible practices related with bells sound. +related-components: + - persona: + - Patrizia +keywords: + - bell sound practices + - transmission of knowledge +work-package: + - WP1 +pilot: + - BELLS +story type: + - expert driven +--- +# Identification of Intangible Elements + +## Persona + +Patrizia is a Demo-Ethno-anthropologist Officer of the Ministry of Culture and she is responsible for the safeguarding of the Intangible Heritage pursuant to the 2003 Unesco Convention + +## Goal + +Patrizia must propose safeguarding actions on some intangible practices related with bells sound. + +## Scenario + +She needs to know if the bells of some bell towers are involved in sound practices conducted by collective social actors who recognize those practices as constitutive trait of their collective identity. Patrizia starts her investigation from the denominations and the location of the bell towers. Patrizia checks whether the sound of those bell towers is done by hand or through electrification. Patrizia checks if there are more or less formalized and more or less recognized human groups that carry out those practices. 
Patrizia investigates what kind of transmission and apprenticeship methods are carried out: in what age groups, if exclusively male or even female, with what type of transmission (oral, written, formal or informal). + +## Competency questions + +CQ1: Where is the Bell Tower? + +CQ2: What kind of execution characterizes that bell tower? + +CQ3: Are there formalized collective actors who carry out sound practices? + +CQ4: Are there formalized transmission methods that characterize the acquisition of skills related to the bell practice? + +CQ5: Which age groups do the transmission practices involve? + +CQ6: Do transmission practices involve both men and women? + +CQ7: Do transmission practices include the organization of public events? + +## Resources diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Patrizia_Ethnoanthropologist/readme.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Patrizia_Ethnoanthropologist/readme.md new file mode 100644 index 00000000..eb66d427 --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Patrizia_Ethnoanthropologist/readme.md @@ -0,0 +1,35 @@ +--- +component-id: Patrizia +name: "Patrizia" +description: Ethnoanthropologist and Officer at the Italian Ministry of Culture +type: Persona +long-title: "Patrizia" +related-components: + - Patrizia#1_IdentificationOfIntangibleElements +project: polifonia-project +pilot: + - BELLS +--- + +# Patrizia + +This is a description of the persona Patrizia + +## Name +Patrizia + +## Age +44 + +## Occupation +- primary role + - ethnoanthropologist +- secondary roles + - Officer at Ministry of Culture + + +## Knowledge/Skills +She is an ethnoanthropologist with interests in intangible heritage and informal transmission of knowledge among communities, especially concerning traditional theatre and music practices. 
 + +## Interests +She is involved in research on intangible heritage safeguarding and she cooperates with communities and groups on safeguarding policies. diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Paul_Organ_Advisor/Paul#1_OrganComparison.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Paul_Organ_Advisor/Paul#1_OrganComparison.md new file mode 100644 index 00000000..92da81d3 --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Paul_Organ_Advisor/Paul#1_OrganComparison.md @@ -0,0 +1,166 @@ +--- +component-id: Paul#1_OrganComparison +name: Organ Comparison +type: Story +description: Successfully plan the restoration of an organ. +related-components: + - persona: + - Paul +keywords: + - dutch organs + - instruments + - tradition +work-package: + - WP2 + - WP5 +pilot: + - ORGANS +story type: + - expert driven +--- +# Organ Comparison + +## Goal + +Successfully plan the restoration of an organ. + +## Scenario + +Paul is hired to plan and supervise the restoration of an organ in a church. The church is from the sixteenth century. Paul would look up the organ in the Dutch organ encyclopaedia, which is ordered based on the year of production of the case of the organ. Because the organ is not necessarily made in the same year as the church, he first has to go through the indexes of the encyclopaedias in order to find the correct organ. Then, Paul searches for all organs that are made by the same organ builder. These organs are used as material for comparisons of the technical features. Finding and comparing the organs can be a tedious and time-consuming task. If this can be simplified and sped up by the use of the portal, Paul would save a lot of time and effort. +As it turns out, the comparison of the disposition indicates that the organ up for restoration has been changed significantly during a previous restoration. 
This is indicated by a large difference in base stops that is used in all organs by the same organ builder. Paul now has to go visit some of the other organs and the church archives in order to verify whether some of those stops were the original stops or not. + +## Competency questions + +CQ1: Which are all organs at location X? + +CQ2: Which are all organs in city X? + +CQ3: Which are all organs near to geographic coordinates x, y? + +CQ4: Which are all organs that have stop X? + +CQ5: Which are all organs with playing aid X? + +CQ6: Which are all organs with more than X stops? + +CQ7: Which are all organs with more than X keyboards? + +CQ8: Which are all organs with manual range starting lower than X? + +CQ9: Which are all organs with manual range ending higher than X? + +CQ10: Which are all organs with pedal range starting lower than X? + +CQ11: Which are all organs with pedal range ending higher than X? + +CQ12: Which are all organs with wind system type X? + +CQ13: Which are all organs with temperament X? + +CQ14: Which are all organs with year of construction after X? + +CQ15: Which are all organs with year of construction before X? + +CQ16: Which are all organs built by organ builder X? + +CQ17: Which are all organs which have been maintained or restored by builder X? + +CQ18: Which are all organs that are linked to person X? + +CQ19: Which are all organs with more than X bellows? + +CQ20: Which are all organs with pitch higher than X Hz? + +CQ21: Which are all organs with wind pressure higher than X mm? + +CQ22: Which are all organs with wind pressure lower than X mm? + +CQ23: Which are all organs with key action type X? + +CQ24: Which are all organs with stop action type X? + +CQ25: Which are all organs with pitch lower than X Hz? + +CQ26: Which are all organs with console location X? + +CQ27: Which are all organs based on search term X? + +CQ28: Who was the builder of organ X? + +CQ29: What is the current location of organ X? 
 + +CQ30: What were previous locations of organ X? + +CQ31: When have changes been made to organ X? + +CQ32: What changes have been made to organ X? + +CQ33: Why was change X made to organ Y? + +CQ34: What is the current disposition of organ X? + +CQ35: What is the composition of stop X in organ Y? + +CQ36: Of which material is stop X made? + +CQ37: Of which material is the case of organ X made? + +CQ38: Which materials have been used to build organ X? + +CQ39: What historic dispositions are known for organ X? + +CQ40: What is the source for historic disposition X of organ Y? + +CQ41: What is the current pitch of organ X? + +CQ42: What is the current temperament of organ X? + +CQ43: What is the current console location of organ X? + +CQ44: What is the current wind system type of organ X? + +CQ45: What is the current wind pressure of organ X? + +CQ46: What are the characteristics of the case of organ X? + +CQ47: What are decorative elements of the case of organ X? + +CQ48: What are inscriptions on the case of organ X? + +CQ49: What are the dispositions of all organs made by organ builder X? + +CQ50: What are the sources for fact X? + +CQ51: Given organ X, what organs are similar according to similarity measure Y? + +CQ52: What literature exists about organ X? + +CQ53: What literature exists about organ builder X? + +CQ54: What sources are available regarding the sound of organ X? + +CQ55: Does organ X still exist? + +CQ56: When was organ X deconstructed? + +CQ57: Why was organ X deconstructed? + +CQ58: Which parts of deconstructed organ X are still in existence? + +CQ59: Where are parts of deconstructed organ X currently located? + +CQ60: Which components of organ X have been reused from other (earlier) organs? + +CQ61: What is the origin of component X of organ Y? + +CQ62: Which are all organs that have components made by organ builder X? 
+ +## Resources + +- Het Historische Orgel in Nederland – Dutch organ encyclopedia +- http://www.orgbase.nl – Dutch hobbyist-made website with information similar to Het Historische Orgel +- Church and city archives +- Old newspapers +- https://www.delpher.nl – digital database from the Dutch Royal Library +- Organs (sight visits) +- http://www.dtbob.org/ – Belgian digital organ database diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Paul_Organ_Advisor/Paul#2_ResourceReliability.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Paul_Organ_Advisor/Paul#2_ResourceReliability.md new file mode 100644 index 00000000..5e3d826c --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Paul_Organ_Advisor/Paul#2_ResourceReliability.md @@ -0,0 +1,43 @@ +--- +component-id: Paul#2_ResourceReliability +name: Resource Reliability +type: Story +description: Successfully plan the restoration of an organ. +related-components: + - persona: + - Paul +keywords: + - dutch organs + - instruments + - tradition +work-package: + - WP2 + - WP5 +pilot: + - ORGANS +story type: + - expert driven +--- +# Resource Reliability + +## Goal + +Successfully plan the restoration of an organ. + +## Scenario + +In this scenario Paul has to plan a similar restoration. However, from his already extensive knowledge about organs, he is uncertain about some of the information that was provided by the encyclopaedia. That is, some of the mentioned components and technicalities such as the pitch seemed illogical when compared to the other organs from the same organ builder. If it would somehow be possible to mark facts that are unreliable or untrue, that would save Paul a lot of time checking, verifying, and notating. + +## Competency questions + +CQ42: What are the sources for fact X? 
+ +## Resources + +- Het Historische Orgel in Nederland – Dutch organ encyclopedia +- http://www.orgbase.nl – Dutch hobbyist-made website with information similar to Het Historische Orgel +- Church and city archives +- Old newspapers +- https://www.delpher.nl – digital database from the Dutch Royal Library +- Organs (sight visits) +- http://www.dtbob.org/ – Belgian digital organ database \ No newline at end of file diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Paul_Organ_Advisor/readme.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Paul_Organ_Advisor/readme.md new file mode 100644 index 00000000..48beb710 --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Paul_Organ_Advisor/readme.md @@ -0,0 +1,33 @@ +--- +component-id: Paul +name: "Paul" +description: Organist, Organ Builder and Musicologist. +type: Persona +long-title: "Paul" +related-components: + - Paul#1_OrganComparison + - Paul#2_ResourceReliability +project: polifonia-project +pilot: + - ORGANS +--- + +# Persona description + +This is a description of the persona Paul + +## Name +Paul + +## Age +38 + +## Occupation +Organ advisor / Organ builder + + +## Knowledge/Skills +Paul is also an organist with a background in musicology. + +## Interests +Besides it being his job, organs and music in general are also hobbies of his. Moreover, Paul is highly engaged in the preservation of organs as national and artistic heritage. diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/README.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/README.md new file mode 100644 index 00000000..0d594aac --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/README.md @@ -0,0 +1,98 @@ +# Polifonia Stories + +This is a shared common space to share stories for the Polifonia project. A story is a template for collecting requirements. 
+ +A story is composed of: +- **Persona** + - It is a research-based description of a typical user. + - It contains attributes such as name, age, occupation (if the persona has more than one role, indicate which one is their primary role and which one(s) the secondary role(s)), and relevant characteristics of the person such as their knowledge and skills and their interests. +-  **Goal** + - It is a short textual description of the goal(s) that the persona needs to be addressed in the story. + - maximum number of characters: 1200 + - The goal(s) is(are) also represented by a short (maximum 5) list of keywords. +- **Scenario** + - It is a story describing how the persona's task/need/problem is solved before, during and after interaction with the resource/software/service being developed. + - maximum number of characters: 1200 +- **Competency questions (CQs)** + - Question(s) the persona needs the resource/software/service to answer for satisfying their task/need/problem. +- **Resources (optional)** + - List of resources (with references/links) where it is expected or known that the persona can find what she's looking for. + +## How to create a story + +There is one folder for each Persona, named with the name of the Persona and their primary occupation. + +1. Check all existing Personas (there is one subfolder for each Persona in the "stories" folder), in order to see if there is already the Persona you need for your story. You will find a readme file in each folder, describing the Persona. +2. If you find a Persona that suits you, create a new file in the respective folder, named ``Name-of-the-persona``#``progressive-number``_``KeywordRepresentingTheMainGoal``.``md``, and fill the file with your story, following this [example](https://github.com/polifonia-project/stories/blob/main/Sethus:%20Music%20Theorist/Sethus%20-%20Conflicting%20theoretical%20interpretations.md). +3. If not, create a new folder, named ``Name-of-the-persona: Primary Occupation``. 
If the persona has more than one role, use the primary role for naming the folder. Then, create a readme file (``readme.md``) describing the persona (see this [example]( https://github.com/polifonia-project/stories/blob/main/Sethus:%20Music%20Theorist/readme.md)), and then a file with your story as this [example](https://github.com/polifonia-project/stories/blob/main/Sethus:%20Music%20Theorist/Sethus%20-%20Conflicting%20theoretical%20interpretations.md). +4. Besides providing information about the four components of the story, you should additionally fill in a table with this information: +- ID (``Name-of-the-persona``#``progressive-number``_``KeywordRepresentingTheMainGoal``) +- Persona (name of the persona) +- Keywords (representing their goals) +- WP (WPs involved in the story) +- Pilots (pilots involved in the story) +- Priority based on a “wow” scale, reflecting the impact that would be achieved by addressing the story, choosing from: + - must have (i.e. it is something that is already supported in other systems, it is state of the art) + - life improver (i.e. I would be able to make the same discovery/work in significant less time) + - life changer (i.e. I would be able to make discoveries/works that now cannot be done or are extremely hard to do) + - breakthrough (i.e. this would be a breakthrough in my field) +5. After completing your story, you should update the "List of personas" and/or the "List of stories" in this ``README.md`` file. +- If you created a new Persona: add to "List of personas" the name of the folder, and a link to the folder, following the examples. Then, add to "List of stories" the name of the story you created, and a link to the file. +- If you reused an existing Persona for your story: add to "List of stories" the name of the story you created, and a link to the file. 
+ +## List of personas + +- (add personas here) +- [Ralph_Music_Historian](https://github.com/polifonia-project/stories/tree/main/Ralph_Music_Historian) +- [Keith_Music_Producer](https://github.com/polifonia-project/stories/tree/main/Keith_Music_Producer) +- [Mark_Computational_Musicologist](https://github.com/polifonia-project/stories/tree/main/Mark_Computational_Musicologist) +- [Ortenz_Music_Historian](https://github.com/polifonia-project/stories/tree/main/Ortenz_Music_Historian) +- [Patrizia:ethnoanthropologist](https://github.com/polifonia-project/stories/tree/main/Patrizia:ethnoantrhropologist) +- [Keoma_Architect](https://github.com/polifonia-project/stories/tree/main/Keoma_Architect) +- [Sethus_Music_Theorist](https://github.com/polifonia-project/stories/tree/main/Sethus_Music_Theorist) +- [Carolina_Music_Historian](https://github.com/polifonia-project/stories/blob/main/Carolina_Music_Historian) +- [Anna_Hearing_Impaired](https://github.com/polifonia-project/stories/tree/main/anna:hearing-impaired) +- [David_Music_Historian](https://github.com/polifonia-project/stories/tree/main/David_Music_Historian) +- [Sonia_Playlist_User](https://github.com/polifonia-project/stories/tree/main/Sonia_Playlist_User) +- [Sophia_Musicologist](https://github.com/polifonia-project/stories/tree/main/Sophia_Musicologist) +- [William_Curator_Europeana](https://github.com/polifonia-project/stories/tree/main/William_Curator_Europeana) +- [Anna_Hearing-impaired](https://github.com/polifonia-project/stories/tree/main/anna:hearing-impaired) +- [Laurent_Music_Journalist](https://github.com/polifonia-project/stories/tree/main/Laurent_Music_Journalist) +- [Andrea_Theology_Scholar](https://github.com/polifonia-project/stories/tree/main/Andrea_Theology_Scholar) +- [Valeriana_Linguist](https://github.com/polifonia-project/stories/tree/main/Valeriana_Linguist) +- [Jorge_Librarian](https://github.com/polifonia-project/stories/tree/main/Jorge_Librarian) + +## List of stories + +- (add stories 
here) +- [Ralph#1_wordsAndMusic](https://github.com/polifonia-project/stories/edit/main/Ralph_Music_Historian/Ralph%231_wordsAnd_Music) +- [Keith#1_musicConnections](https://github.com/polifonia-project/stories/blob/main/Keith_Music_Producer/Keith%231musicConnections.md) +- [Mark#1_FolkMusic](https://github.com/polifonia-project/stories/blob/main/Mark_Computational_Musicologist/Mark%23_1folkMusic.md) +- [Mark#2_DutchOrgans](https://github.com/polifonia-project/stories/blob/main/Mark_Computational_Musicologist/Mark%232_dutchOrgans.md) +- [Ortenz#1 MusicAndChildhood](https://github.com/polifonia-project/stories/blob/main/Ortenz_Music_Historian/Ortenz%23MusicAndChildhood.md) +- [Ortenz#2 Musical social network](https://github.com/polifonia-project/stories/blob/main/Ortenz_Music_Historian/Ortenz_-_Musical_social_network.md) +- [Patrizia#1_IdentificationOfIntangibleElements](https://github.com/polifonia-project/stories/blob/main/Patrizia:ethnoantrhropologist/Patrizia%231_IdentificationOfIntangibleElements.md) +- [Keoma#1_RestorationAndSoundPractices](https://github.com/polifonia-project/stories/blob/main/Keoma_Architect/Keoma%231_RestorationAndSoundPractices.md) +- [Sethus#1_ConflictingTheoreticalInterpretations](https://github.com/polifonia-project/stories/blob/main/Sethus_Music_Theorist/Sethus_-_Conflicting_theoretical_interpretations.md) +- [Sethus#2_CreateRelevantCorpus](https://github.com/polifonia-project/stories/blob/main/Sethus_Music_Theorist/Sethus_-_Create_relevant_corpus.md) +- [Carolina#1_SourcesCrossAnalysis](https://github.com/polifonia-project/stories/blob/main/Carolina_Music_Historian/Carolina%231_SourcesCrossAnalysis.md) +- [Anna#1_FeelingMusic](https://github.com/polifonia-project/stories/blob/main/anna:hearing-impaired/Anna%231_hearingMusic.md) +- [David#1_NonEliteMusic](https://github.com/polifonia-project/stories/blob/main/David:Music-Historian/David%231_musichistorian.md) +- [William#1 
EuropeanFolkMusic](https://github.com/polifonia-project/stories/blob/main/William_Curator_Europeana/William%231_EuropeanFolkMusic.md) +- [Sonia#1_PlaylistUser](https://github.com/polifonia-project/stories/blob/main/Sonia_Playlist_User/Sonia%231_PlaylistUser.md) +- [Sophia#1 MusiciansAndTheirEnvironment](https://github.com/polifonia-project/stories/blob/main/Sophia_Musicologist/Sophia%23MusiciansAndTheirEnvironment.md) +- [Sophia#2 OriginsAndForm](https://github.com/polifonia-project/stories/blob/main/Sophia_Musicologist/Sophia%23OriginsAndForm.md) +- [Laurent#1_MusicArchives](https://github.com/polifonia-project/stories/blob/main/Laurent_Music_Journalist/Laurent%231_MusicArchives.md) +- [Andrea#1_Serendipity](https://github.com/polifonia-project/stories/blob/main/Andrea__Theology_Scholar/Andrea%231_Serendipity.md) +- [Valeriana#1_DiscourseAnalysis](https://github.com/polifonia-project/stories/blob/main/Valeriana__Linguist/Valeriana%231_DiscourseAnalysis.md) +- [Valeriana#2_Terminology](https://github.com/polifonia-project/stories/blob/main/Valeriana__Linguist/Valeriana%232_Terminology.md) +- [Jorge#1_OrganizeMyLibrary](https://github.com/polifonia-project/stories/blob/main/Jorge_Librarian/Jorge%231_OrganizeMyLibrary.md) + + + +## Useful links + +GitHub guides: +- Index https://guides.github.com/ +- Markdown https://guides.github.com/features/mastering-markdown/ +- Markdown cheatsheet: https://www.markdownguide.org/cheat-sheet/ diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Ralph_Music_Historian/Ralph#1_WordsAndMusic.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Ralph_Music_Historian/Ralph#1_WordsAndMusic.md new file mode 100644 index 00000000..9b174cd4 --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Ralph_Music_Historian/Ralph#1_WordsAndMusic.md @@ -0,0 +1,41 @@ +--- +component-id: Ralph#1_WordsAndMusic +name: Words and Music +type: Story +description: Ralph is 
particularly interested in matching text to music, e.g. in terms of emotions. +related-components: + - persona: + - Ralph +keywords: + - music + - words +work-package: + - WP2 +pilot: [] +story type: + - expert driven +--- + + +# Words and Music + +## Persona + +Ralph + +## Goal + +Ralph is particularly interested in matching text to music, e.g. in terms of emotions. + +## Scenario + +Ralph is developing a database of librettos. He analyses these librettos to relate the words to music, in particular in how they create emotions. + +## Competency questions + +CQ1: Who is the poet (librettist) associated with a text? +CQ2: What is the emotion represented by a portion of text (~140 categories)? +CQ3: How do libretto and music relate, e.g. in describing an emotion? +CQ4: How does the presentation (text and music) of a work, e.g. an operatic aria, change over time, and depending on the artist? +CQ5: How have the music and libretto been influenced by the historical, e.g. social, political and cultural, environment? +CQ6: How can one describe and classify a scenario? diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Ralph_Music_Historian/readme.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Ralph_Music_Historian/readme.md new file mode 100644 index 00000000..e8a98744 --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Ralph_Music_Historian/readme.md @@ -0,0 +1,33 @@ +--- +component-id: Ralph +name: "Ralph" +description: Researcher and Professor in Musicology +type: Persona +long-title: "Ralph" +related-components: + - Ralph#1_WordsAndMusic +project: polifonia-project +pilot: [] +--- + +# Ralph + +This is a description of the persona Ralph. + +## Name + +Ralph + +## Age + +## Occupation + +Ralph is a researcher and professor. + +## Knowledge / skills + +Ralph’s original qualification was in musicology. 
+ +## Interests + +Ralph is interested in 15th to 16th century madrigals and in 16th to 19th century opera. He has a particular interest in the development of technology for his research. diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Sethus_Music_Theorist/Sethus#1_ConflictingTheoreticalInterpretations.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Sethus_Music_Theorist/Sethus#1_ConflictingTheoreticalInterpretations.md new file mode 100644 index 00000000..3da0c7d0 --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Sethus_Music_Theorist/Sethus#1_ConflictingTheoreticalInterpretations.md @@ -0,0 +1,112 @@ +--- +component-id: Sethus#1_ConflictingTheoreticalInterpretations +name: "Conflicting theoretical interpretations" +description: "In collaboration with other colleagues, Sethus is preparing a book on modal theory and its application to late Renaissance compositions." +persona: +- Sethus +keywords: +- conflicting theoretical interpretations +- analytical concepts +- modal analysis +- history of music theory +- Orlando di Lasso +work-package: +- WP2 +- WP4 +- WP5 +pilot: +- TONALITIES +priority: +- Life changer +type: Story +story type: +- expert domain +related-components: +- persona: + - Sethus +- story: + - Sethus#3_ConflictingAnalyticalAnnotations + - Sethus#2_CreateRelevantCorpus + +--- +# Conflicting theoretical interpretations + +## Persona + +Sethus is a music theorist, composer and teacher, specialized in late Renaissance music. In this field, his research focuses on the description and formalization of the compositional processes at work in modal polyphony. His reflection is based on a close reading of the scientific literature (historical and contemporary) on which he builds a synthesis oriented towards compositional practice. 
+ +## Goal + +In collaboration with other colleagues, Sethus is preparing a book on modal theory and its application to late Renaissance compositions. He considers that the attribution of a polyphonic work to a mode implies a complex interpretative process. The criteria for this identification are not exclusively objective and fixed in the score, but always include an ‘ideological’ and symbolic dimension. Modal analysis thus depends on the theoretical frameworks that it invokes – the octoechos, the neo-classical theory of the 12 modes, etc. – and on the theoretical interpretation of the work, which transcends the work. This means that it is not the attribution of a work to a mode that is important from a musicological and compositional point of view. Instead, the question is to determine how, through the prism of different theoretical models and analytical points of view adopted, a mode manifests itself in the work, unfolds over time and governs its compositional framework (or whether, on the contrary, the work lies outside the theoretical frameworks of the modality). +- conflicting theoretical interpretations +- analytical concepts +- modal analysis +- history of music theory +- Orlando di Lasso + +## Scenario + +In the book he aims to write with his colleagues, Sethus is charged with writing a chapter on the modal cycle "Lagrime di San Pietro" by Orlando di Lasso. To this end, Sethus and his colleagues (Leonhard, Bernhard, Harold, Siegfried, Frans Robert and Alexander) carried out individual analyses and entered them into the framework of the Tonalities pilot. + +In contrast to the other pieces which do not pose major problems, the last work in the cycle, Vide homo, is highly ambiguous from a modal perspective and thus of particular interest for this book. Several modal hypotheses have been formulated by Sethus and his colleagues based on various immanent and transcendent criteria. 
Sethus must now examine these individual points of view in the light of the score and of the theoretical models implied. Then, he will have to confront all these points of view to propose a well-argued interpretation. + +## Competency questions + +CQ1: To which modes Vide homo has been assigned to in Tonalities? Leonhard, Bernhard, Harold, Siegfried, Frans and Alexander relate the motet to the tonus peregrinus. Robert, however, considers the work to be written in the mixolydian mode. + +CQ2: Are the criteria on which these interpretations are based true in the score and, if so, to what extent? + - What are the cadence points in the work? + - What are the part’s ranges? Do they correspond to modal octaves? + - What is the final of the bassus? + - What is the last chord? + - Are there any melodic patterns related to modality? + - To what diatonic environment does the work belong to? + - What are the part’s clefs and keys? + - Are the theoretical models on which these criteria are based – for example the Zarlinian cadential scheme ^1-^3-^5 – in line with this work? + - What is the tension between the expected theoretical criteria and their actual realisation in the work? + +CQ3: Can one assume that the modal ambiguity evidenced by the analytical interpretations is intended? Harold sees, for example, the use of modes in Lagrime as a religious symbol. Alexander suggests that Tansillo's texts set to music by Lassus can be read as an examination of conscience, advocated by post-Tridentine spirituality. Can these hypotheses be substantiated on the basis of Polifonia's analytical and heritage knowledge? + - Where does the work appear within the cycle? + - What do we know about this cycle and its place in Lassus' output? + - Which poetic text is set to music? + - Has this poetic text been set to music before? + +CQ4: Apart from Lasso’s possible exegetical intentions, what does this work tell us about the status of the modes at this moment in the history of composition? 
+ - Is the modal ambiguity observed here frequent in Lasso’s output? + - Do other works come close, in terms of their inner properties, to what is observed here? + - Is the ambiguity observed here specific to particular groups of works? + +## Resources +Bibliographie + +Barbier, Jacques. “Un Homme Armé à Bruxelles. Étude De La Messe De Mathurin Forestier Contenue Dans L’Occo Codex”, Revue Belge De Musicologie / Belgisch Tijdschrift Voor Muziekwetenschap, vol. 55, 2001, pp. 53-68. + +Ceulemans, Anne-Emmanuelle. “Cadential and Modal Treatment in Palestrina’s Delle Madrigali Spirituali a Cinque Voci Libro Secondo (1594) and Lasso’s Lagrime di San Pietro (1595)”, Musurgia, vol. XXVI, no. 2, 2019, pp. 71-9. + +Ceulemans, Anne-Emmanuelle. Lasso, Meier, Powers. The Reality of the Modes under Scrutiny, 9th European Music Analysis Conference - EUROMAC 9. + +Crook, David. Orlando di Lasso’s Imitation Magnificats for Counter-Reformation Munich. Princeton, New Jersey: Princeton University Press, 1994. + +Einstein, Alfred. The Italian Madrigal. Princeton, New Jersey: Princeton University Press, 1949. + +Fisher, Alexander J. “‘Per Mia Particolare Devotione’: Orlando Di Lasso’s Lagrime Di San Pietro and Catholic Spirituality in Counter-Reformation Munich”, Journal of the Royal Musical Association, vol. 132, no. 2, 2007, pp. 167-220. + +Freedman, Richard. “Le Jeune’s ‘Dodecacorde’ as a Site for Spiritual Meanings”, Revue De Musicologie, vol. 89, no. 2, 2003, pp. 297-309. + +Freedman, Richard. “‘Marenzio’s Madrigali a Quattro, Cinque Et Sei Voci of 1588: A Newly-Revealed Madrigal Cycle and Its Intellectual Context’”, The Journal of Musicology, vol. 13, no. 3, 1995, pp. 318-54. + +Gissel, Siegfried. “Die Tonarten Der ‘Lagrime Di San Pietro’ Von Orlando Di Lasso”, Musica Disciplina, vol. 47, 1993, pp. 5-33 + +Lino, Daniela Francine and Fiorini, Carlos Fernando. “Lagrime di San Pietro by Orlando di Lasso in a New and Revised Critical Edition”, Opus, vol. 18, no. 2, 2012, pp. 111-40. 
+ +Lino Popolin, Daniela Francine. Lagrime di San Pietro de Orlando di Lasso: um estudo de preparação e execução através de uma nova edição crítica e revisada, Universidade Estadual de Campinas, PhD., 2013. + +Luoma, Robet. Music, Mode, and Words in Orlando Di Lasso’s Last Works, Lewiston, N.Y., U.S.A.: E. Mellen Press, 1989 (Studies in the History & Interpretation of Music 11). + +Meier, Bernhard. Die Tonarten der klassischen Vokalpolyphonie nach den Quellen dargestellt, Utrecht: Oosthoek, Scheltema & Holkema, 1974. + +Powers, Harold S. “Is Mode Real? Pietro Aron, the Octenary System, and Polyphony”, in Basler Jahrbuch für historische Musikpraxis, vol. 16, 1992, pp. 9-52. + +Procter, Michael. “The Cyclic Works of Orlando di Lasso”, Sacred Music, vol. 134, no. 1, 2007, pp. 12-4. + + diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Sethus_Music_Theorist/Sethus#2_CreateRelevantCorpus.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Sethus_Music_Theorist/Sethus#2_CreateRelevantCorpus.md new file mode 100644 index 00000000..07f36b62 --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Sethus_Music_Theorist/Sethus#2_CreateRelevantCorpus.md @@ -0,0 +1,67 @@ +--- +component-id: Sethus#2_CreateRelevantCorpus +name: "Create a relevant corpus" +description: "Sethus prepares a book on a specific aspect of compositional practices during the Renaissance period, and aims at finding a set of relevant references to sources to illustrate his text." 
+keywords: +- multi-criteria search +- corpus navigation based on facets +- virtual corpus, defined by intention +- history of music theory +work-package: +- WP1 +- WP2 +- WP3 +pilot: +- FACETS +priority: +- Life changer +type: Story +story type: +- expert driven +related-components: +- persona: + - Sethus +- story: + - Sethus#1_ConflictingTheoreticalInterpretations + - Sethus#3_ConflictingAnalyticalAnnotations +--- +# Create a relevant corpus +## Persona + +Sethus is a music theorist, composer and teacher, specialized in late Renaissance music. In this field, his research focuses on the +description and formalization of the compositional processes at work in modal polyphony. His reflection is based on a close reading of +the scientific literature (historical and contemporary) on which he builds a synthesis oriented towards compositional practice. + +## Goal + +Sethus prepares a book on a specific aspect of compositional practices during the Renaissance period, and aims at finding a set of relevant references to sources to illustrate his text. Each concept +addressed in the book (e.g., modal cadences, imitations, reverse imitations, structure of pieces, modes, etc.) should be illustrated by a list of fragments taken from pieces of various authors, various periods, +various compositional styles, etc. Ideally, the same piece would serve as a source to illustrate several concepts. Fragments should be easy to refer to, annotated, quotable, +and interlinked to highlight their relationship, differences / resemblances (i.e. the link itself should be annotated to express its purpose). + +Sethus can access a large corpus of music resources, including music scores encoded in MEI. However, most of these are either irrelevant, or not suited (lack of representative fragments, or outliers far from the canonical +occurrences which are searched for). 
His goal is therefore to extract or organize according to his need a subset of the corpus that serves as a source for the book illustration. + +## Scenario + +Sethus uses a search tool that allows him to explore the corpus at hand based on several criteria. An initial search can be done based on melodic profile (say, a typical cadence in a soprano line). The first result set is then automatically subdivided by the +system based on secondary criteria (period, mode, composer for instance). Sethus can fix this initial organization and refine the search on each part of the hierarchy thereby created, and +reinitialize the process by choosing another initial criterion. Along the way, identified fragments can be annotated, and the system keeps track of the search pattern that characterizes a level in the result set hierarchy. +Additional tools can be used to further reinforce the structure of the result set, leading to a final Relevant Corpus with a rich internal structure and annotations, whose definition +(the set of queries and the hierarchy of facets) can be stored and later reconsidered. + +## Competency questions + +CQ1: Search operations should be expressed on either raw-level features (e.g. melodic lines) or higher-level features (e.g. modes, cadences, triad patterns, etc.). The system should retrieve efficiently all sources that match the search pattern, and identify pattern occurrences + +CQ2: A subset can be hierarchically refined based on criteria independent from one another and independent from the initial search pattern. The user can navigate in this hierarchy and refine the search. 
+ +CQ3: At each step, the result set takes the form of a subcorpus organized according to rich relationships, and enriched with user annotations + +CQ4: A result set can be stored (in intention, not in extension) as a Relevant Corpus, and its definition can be revised at any moment + +CQ5: Navigating through a Relevant Corpus is based on annotation and search patterns + + +## Resources +Bibliographie diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Sethus_Music_Theorist/Sethus#3_ConflictingAnalyticalAnnotations.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Sethus_Music_Theorist/Sethus#3_ConflictingAnalyticalAnnotations.md new file mode 100644 index 00000000..e4930fa0 --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Sethus_Music_Theorist/Sethus#3_ConflictingAnalyticalAnnotations.md @@ -0,0 +1,156 @@ +--- +component-id: Sethus#3_ConflictingAnalyticalAnnotations +name: "Conflicting analytical annotations" +description: "In collaboration with other colleagues, Sethus is preparing a book on modal theory and its application to late Renaissance compositions." +keywords: +- conflicting analytical interpretations +- analytical concepts +- modal analysis +- history of music theory +- Orlando di Lasso +work-package: +- WP1 +- WP2 +- WP4 +- WP5 +pilot: +- TONALITIES +priority: +- Life changer +type: Story +story type: +- expert domain +related-components: +- persona: + - Sethus +- story: + - Sethus#1_ConflictingTheoreticalInterpretations + - Sethus#2_CreateRelevantCorpus +--- +# Conflicting analytical annotations + +## Persona + +Sethus is a music theorist, composer and teacher, specialized in late Renaissance music. In this field, his research focuses on the description and formalization of the compositional processes at work in modal polyphony. 
His reflection is based on a close reading of the scientific literature (historical and contemporary) on which he builds a synthesis oriented towards compositional practice. + +## Goal + +In collaboration with other colleagues, Sethus is preparing a book on modal theory and its application to late Renaissance compositions. He considers that the attribution of a polyphonic work to a mode implies a complex interpretative process. The criteria for this identification are not exclusively objective and fixed in the score, but always include an ‘ideological’ and symbolic dimension. Modal analysis thus depends on the theoretical frameworks that it invokes – the octoechos, the neo-classical theory of the 12 modes, etc. – and on the theoretical interpretation of the work, which transcends the work. This means that it is not the attribution of a work to a mode that is important from a musicological and compositional point of view. Instead, the question is to determine how, through the prism of different theoretical models and analytical points of view adopted, a mode manifests itself in the work, unfolds over time and governs its compositional framework (or whether, on the contrary, the work lies outside the theoretical frameworks of the modality). +- conflicting theoretical interpretations +- analytical concepts +- modal analysis +- history of music theory +- Orlando di Lasso + +## Scenario + +In the book he aims to write with his colleagues, Sethus is charged with writing a chapter on the modal cycle "Lagrime di San Pietro" by Orlando di Lasso. To this end, Sethus and his colleagues (Leonhard, Bernhard, Harold, Siegfried, Frans Robert and Alexander) carried out individual analyses and entered them into the framework of the Tonalities pilot. + +In contrast to the other pieces which do not pose major problems, the last work in the cycle, Vide homo, is highly ambiguous from a modal perspective and thus of particular interest for this book. 
Several modal hypotheses have been formulated by Sethus and his colleagues based on various immanent and transcendent criteria. Sethus must now examine these individual points of view in the light of the score and of the theoretical models implied. Then, he will have to confront all these points of view (Analysis#1 and Analysis#2) to propose a well-argued interpretation. + +## Competency questions + +CQ1: To which modes has Vide homo been assigned in Tonalities? Leonhard, Bernhard, Harold, Siegfried, Frans and Alexander relate the motet to the tonus peregrinus. Robert, however, considers the work to be written in the mixolydian mode. + +CQ2: Are the criteria on which these interpretations are based true in the score and, if so, to what extent? + +CQ3: What are the cadence points in the work? + +CQ4: What are the part’s ranges? Do they correspond to modal octaves? + +CQ5: What is the final of the bassus? + +CQ6: What is the last chord? + +CQ7: Are there any melodic patterns related to modality? + +CQ8: To what diatonic environment does the work belong? + +CQ9: What are the part’s clefs and keys? + +CQ10: Are the theoretical models on which these criteria are based – for example the Zarlinian cadential scheme ^1-^3-^5 – in line with this work? + +CQ11: What is the tension between the expected theoretical criteria and their actual realisation in the work? + +[Metadata section] + +CQ12: To what corpus does this work belong? + +CQ13: What is the name of the corpus? + +CQ14: What is the pseudonym of the composer for X? + +CQ15: What is the birthdate of the composer? + +CQ16: What is the date of death of the composer? + +CQ17: To what genre does the composition belong? + +CQ18: To what collection does the composition belong? + +CQ19: If this is the case, to what larger work does the composition belong? + +CQ20: If this is the case, in what other work(s) is the composition quoted? + +CQ21: Does the composition contain any quotation from other composition(s)? 
+ +CQ22: What is the origin of the composition? + +CQ23: If available online, what is the URL of the composition? + +CQ24: In the case of a manuscript/printed source, where is it housed? + +CQ25: Who is the scientific editor of the composition? + +CQ26: If any, what is the license of the transcription? + +CQ27: What is the year of publication of the music printed source? + +CQ28: Where was the music printed source published? + +CQ29: What is the editor of the music printed source? + +CQ30: In what electronic format is the composition available? + +CQ31: What is the license of the final reused/modified electronic format of the score? + + +## Example Data +[Analysis#1 (Marco Gurrieri)](https://github.com/polifonia-project/stories/blob/main/Sethus:%20Music%20Theorist/Analysis%231.pdf) + +[Analysis#2 (Christophe Guillotel-Nothmann)](https://github.com/polifonia-project/stories/blob/main/Sethus:%20Music%20Theorist/Analysis%232.pdf) + +## Resources +Bibliographie + +Barbier, Jacques. “Un Homme Armé à Bruxelles. Étude De La Messe De Mathurin Forestier Contenue Dans L’Occo Codex”, Revue Belge De Musicologie / Belgisch Tijdschrift Voor Muziekwetenschap, vol. 55, 2001, pp. 53-68. + +Ceulemans, Anne-Emmanuelle. “Cadential and Modal Treatment in Palestrina’s Delle Madrigali Spirituali a Cinque Voci Libro Secondo (1594) and Lasso’s Lagrime di San Pietro (1595)”, Musurgia, vol. XXVI, no. 2, 2019, pp. 71-9. + +Ceulemans, Anne-Emmanuelle. Lasso, Meier, Powers. The Reality of the Modes under Scrutiny, 9th European Music Analysis Conference - EUROMAC 9. + +Crook, David. Orlando di Lasso’s Imitation Magnificats for Counter-Reformation Munich. Princeton, New Jersey: Princeton University Press, 1994. + +Einstein, Alfred. The Italian Madrigal. Princeton, New Jersey: Princeton University Press, 1949. + +Fisher, Alexander J. 
“‘Per Mia Particolare Devotione’: Orlando Di Lasso’s Lagrime Di San Pietro and Catholic Spirituality in Counter-Reformation Munich”, Journal of the Royal Musical Association, vol. 132, no. 2, 2007, pp. 167-220. + +Freedman, Richard. “Le Jeune’s ‘Dodecacorde’ as a Site for Spiritual Meanings”, Revue De Musicologie, vol. 89, no. 2, 2003, pp. 297-309. + +Freedman, Richard. “‘Marenzio’s Madrigali a Quattro, Cinque Et Sei Voci of 1588: A Newly-Revealed Madrigal Cycle and Its Intellectual Context’”, The Journal of Musicology, vol. 13, no. 3, 1995, pp. 318-54. + +Gissel, Siegfried. “Die Tonarten Der ‘Lagrime Di San Pietro’ Von Orlando Di Lasso”, Musica Disciplina, vol. 47, 1993, pp. 5-33 + +Lino, Daniela Francine and Fiorini, Carlos Fernando. “Lagrime di San Pietro by Orlando di Lasso in a New and Revised Critical Edition”, Opus, vol. 18, no. 2, 2012, pp. 111-40. + +Lino Popolin, Daniela Francine. Lagrime di San Pietro de Orlando di Lasso: um estudo de preparação e execução através de uma nova edição crítica e revisada, Universidade Estadual de Campinas, PhD., 2013. + +Luoma, Robet. Music, Mode, and Words in Orlando Di Lasso’s Last Works, Lewiston, N.Y., U.S.A.: E. Mellen Press, 1989 (Studies in the History & Interpretation of Music 11). + +Meier, Bernhard. Die Tonarten der klassischen Vokalpolyphonie nach den Quellen dargestellt, Utrecht: Oosthoek, Scheltema & Holkema, 1974. + +Powers, Harold S. “Is Mode Real? Pietro Aron, the Octenary System, and Polyphony”, in Basler Jahrbuch für historische Musikpraxis, vol. 16, 1992, pp. 9-52. + +Procter, Michael. “The Cyclic Works of Orlando di Lasso”, Sacred Music, vol. 134, no. 1, 2007, pp. 12-4. 
+ + diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Sethus_Music_Theorist/readme.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Sethus_Music_Theorist/readme.md new file mode 100644 index 00000000..f7e8a9a9 --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Sethus_Music_Theorist/readme.md @@ -0,0 +1,36 @@ +--- +component-id: Sethus +name: "Sethus" +description: "Sethus's research focuses on the description and formalization of the compositional processes at work in modal polyphony." +long-title: "Sethus" +type: Persona +related-components: +- story: + - Sethus#1_ConflictingTheoreticalInterpretations + - Sethus#2_CreateRelevantCorpus + - Sethus#3_ConflictingAnalyticalAnnotations +--- + +# Sethus + +This is a description of the persona Sethus + +## Name +Sethus + +## Age + + +## Occupation +- primary role + - Music theorist +- secondary roles + - Composer + - Teacher + +## Knowledge/Skills +In this field, his research focuses on the description and formalization of the compositional processes at work in modal polyphony. His reflection is based on a close reading of the scientific literature (historical and contemporary) on which he builds a synthesis oriented towards compositional practice. 
+ +## Interests +He is a specialist in late Renaissance music. + diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Sonia_Playlist_User/Sonia#1_ExplorationMode.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Sonia_Playlist_User/Sonia#1_ExplorationMode.md new file mode 100644 index 00000000..406867ee --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Sonia_Playlist_User/Sonia#1_ExplorationMode.md @@ -0,0 +1,52 @@ +--- +component-id: Sonia#1_ExplorationMode +name: "Exploration mode" +description: "Sonia likes exploring music, either finding out more about music and musicians she already knows or uncovering new interests." +type: Story +keywords: +- Playlist +work-package: +- WP2 +- WP3 +- WP4 +- WP5 +pilot: +- INTERLINK +story type: +- expert driven +related-components: +- persona: + - Sonia +- story: + - Sonia#2_ShuffleMode + - Sonia#3_StatsMode +--- +# Exploration mode + +## Persona +Sonia + +## Goal +Sonia likes exploring music, either finding out more about music and musicians she already knows or uncovering new interests. + +## Scenario +When she gets home, she opens the intelligent visual playlist on her tablet computer. She uses this to play some of her favourite pieces of music in the playlist and also find interesting connections that might lead her to new musical discoveries. First, she selects a particular piece of music that she would like to hear. As the piece is playing, the playlist app creates a dynamic visualisation of the piece and interesting connections to images, textual information, multimedia and other musical pieces inside and outside of her playlist. As the piece is playing she sees a visualisation of a melodic pattern in the piece and other pieces of music in which it also appears. The dynamic visualisation also skips through information about the composer, the musicians and the time and place the piece was recorded. 
She notices that another piece of music she likes was recorded at the venue. Excerpts from the lyrics are also included in the visualisation. She sees that a particular lyrical phrase is taken from a novel and features in other pieces of music. Background information is also presented about a person mentioned in the lyrics. + + +## Competency questions + +- CQ1: What is the musical piece that the persona selects? +- CQ2: With which image/textual information/multimedia/musical piece is the selected musical piece connected to inside/outside the playlist? +- CQ3: Which is the melodic pattern of the musical piece? +- CQ4: In which other musical pieces does the melodic pattern appear? +- CQ5: Who is/are the composer(s)/musician(s) of the musical piece? +- CQ6: What information is available for the composer(s)/musician(s)? +- CQ7: When/Where was the musical piece recorded? +- CQ8: Has another musical piece "the user likes" been recorded in the same venue? +- CQ9: Are there excerpts of the lyrics available for a musical piece? If yes, which are they? +- CQ10: Is the excerpt of the lyric featured in other musical pieces? +- CQ11: Which is the source of the excerpt of the lyrics? +- CQ12: Is there a person mentioned in the excerpt of the lyrics? If yes, what background information is available for him/her? + +## Resources + diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Sonia_Playlist_User/Sonia#2_ShuffleMode.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Sonia_Playlist_User/Sonia#2_ShuffleMode.md new file mode 100644 index 00000000..301262b0 --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Sonia_Playlist_User/Sonia#2_ShuffleMode.md @@ -0,0 +1,50 @@ +--- +component-id: Sonia#2_ShuffleMode +name: "Shuffle mode" +description: "Sonia likes exploring music, either finding out more about music and musicians she already knows or uncovering new interests." 
+keywords: +- Playlist, shuffle +work-package: +- WP2 +- WP3 +- WP4 +- WP5 +pilot: +- INTERLINK +type: Story +story type: +- expert driven +related-components: +- persona: + - Sonia +- story: + - Sonia#1_ExplorationMode + - Sonia#3_StatsMode +--- +# Shuffle mode + +## Persona +Sonia + +## Goal +Sonia likes exploring music, either finding out more about music and musicians she already knows or uncovering new interests. + +## Scenario +While making dinner, she puts the playlist into shuffle mode and stands the tablet computer on the kitchen surface. +Rather than being random, shuffle mode dynamically creates a pathway through the playlist based on meaningful connections. +As each piece plays, the playlist app visualises interconnections to past and future steps in the musical pathway. +Shuffle mode can make connections according to a wide range of features such as the composer, musicians, lyrics, melodic patterns, +locations and historical events. At any point she can skip to the next song in the pathway. She can also alter shuffle mode to focus +on particular types of connections. She decides to focus on connections between people to create a sort of family tree through the playlist.! + + +## Competency questions + +- CQ1: What is the connection between two musical pieces? +- CQ2: Is a musical piece connected to another musical piece by historical event? +- CQ3: Which feature is selected to create the shuffle? +- CQ4: What are the types of features that are used to create shuffles? +- CQ5: Which is the immediate previous/next musical piece? +- CQ6: Is a musical piece connected to another musical piece by common people? 
+ +## Resources diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Sonia_Playlist_User/Sonia#3_StatsMode.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Sonia_Playlist_User/Sonia#3_StatsMode.md new file mode 100644 index 00000000..fb495a84 --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Sonia_Playlist_User/Sonia#3_StatsMode.md @@ -0,0 +1,63 @@ +--- +component-id: Sonia#3_StatsMode +name: "Stats mode" +description: "Sonia likes to explore her playlist history, find interesting statistics and patterns and see how it compares to other music listeners." +keywords: +- Playlist, statistics, history, visualisation, comparison +work-package: +- WP2 +- WP3 +- WP4 +- WP5 +pilot: +- INTERLINK +type: Story +story type: +- expert driven +related-components: +- persona: + - Sonia +- story: + - Sonia#1_ExplorationMode + - Sonia#2_ShuffleMode +--- +# Stats mode +
+## Persona +Sonia +
+## Goal +Sonia likes to explore her playlist history, find interesting statistics and patterns and see how it compares to other music listeners. +
+## Scenario +Sonia has been using the Polifonia Music Player app [1] for some time. She likes using the intelligent agents to find out more about what she is listening to and extend her playlist with their recommendations. When listening to music she also likes to use companion apps that can give her statistics and visualisations of what she has been listening to such as last.fm [2], Obscurify [3], Stats for Spotify [4], MusicScape [5], Discover Quickly [6], Spotify Wrapped [7] and Apple Music Replay [8]. +
+What she likes about these apps is that they can provide her with an overview of what she has been listening to over the past weeks, months or year and compare her listening experience against other music listeners. She is excited to see that Polifonia has launched its own companion app so connects it to the playlist history of her Polifonia Music Player app. 
As well as showing her statistics on what she has been listening to, such as her favourite albums, songs and genres, the app can use information supplied by the intelligent agents to provide more information about her listening history and visualise it in interesting ways. + +The app shows her a visualization of harmonic patterns that commonly appear on her playlist. She selects one of the patterns and can see the breakdown of what types of songs it appears in. Although most of the songs it appears in are in the rock genre, it also features in some of her favourite classical pieces. She wonders whether that could be part of the reason she likes those classical pieces. She also follows a link to an article explaining the history of this chord progression and why it features on a number of rock tracks. She shares this with some of her friends. + +She looks at a visualisation of the lyrics that appear in the songs she listens to. Phrases related to family and children are unusually common in the songs she listens to. In fact, the app tells her that these phrases appear on her playlist more than 90% of other music listeners. She wonders why this might be. As she drills down she can see that they have been used a number of times by some of her favourite bands. She had never noticed that before and wonders whether those bands need to look for new inspiration for their lyrics. + +The app can also visualise her listening history by time and location. There is a large peak of songs released in 2012 and she remembers how much time she spent listening to new songs during that summer. She can also see how narrow her taste in music was during that time. The visualization shows her how her taste in music overall is far more eclectic than the 2012 songs and more eclectic than 95% of other listeners. She feels quite proud of this achievement and shares it on social media. She can also explore her playlist overlaid on a map. 
She is surprised by some of the cities that feature on her map due to being places where her played songs were recorded. She sees that many of her favourite indie tracks by different bands were recorded in the same city, in many cases in the same studio. She looks for other songs recorded in the same studio and adds them to her Polifonia playlist. She also notices that one of the music labels is based in that city and many of her favourite bands have performed in the city, sometimes at the same gig. + + + +## Competency questions + + +## Resources +[1] https://polifonia-project.github.io/sonar2021_demo/ + +[2] https://www.last.fm + +[3] https://obscurifymusic.com/ + +[4] https://www.statsforspotify.com + +[5] https://musicscapes.herokuapp.com + +[6] https://discoverquickly.com + +[7] https://www.spotify.com/uk/wrapped/ + +[8] https://music.apple.com/replay diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Sonia_Playlist_User/readme.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Sonia_Playlist_User/readme.md new file mode 100644 index 00000000..c47a1a96 --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Sonia_Playlist_User/readme.md @@ -0,0 +1,35 @@ +--- +component-id: Sonia +name: "Sonia" +description: "Lecturer, music producer and festival director interested in finding new music." +long-title: "Sonia" +type: Persona +related-components: +- story: + - Sonia#1_ExplorationMode + - Sonia#2_ShuffleMode + - Sonia#3_StatsMode +--- + +# Sonia + +A lecturer, music producer and festival director interested in finding new music. + +## Name +Sonia + +## Age +27 + +## Occupation +Sonia has a variety of roles. She lectures in new media and also works as a music producer and festival director. Her work as a festival director requires her to find new artists and music that could feature in future festivals. + +## Knowledge/Skills +Sonia studied computing and new media at university. 
+ +## Interests +Sonia has a broad interest in all kinds of music. She likes using streaming services, social media and online information to explore her interests in music. + + +## Links +You can find more detailed information about related issues and progress for the Sonia persona in this [project](https://github.com/polifonia-project/stories/projects/1). diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Sophia_Musicologist/Sophia#1_MusiciansAndTheirEnvironment.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Sophia_Musicologist/Sophia#1_MusiciansAndTheirEnvironment.md new file mode 100644 index 00000000..2b95863e --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Sophia_Musicologist/Sophia#1_MusiciansAndTheirEnvironment.md @@ -0,0 +1,61 @@ +--- +component-id: Sophia#1_MusiciansAndTheirEnvironment +name: "Musicians and their environment" +type: Story +description: Sophia is interested in understanding the social-historical reasons behind how the music was created and how it sounds. +related-components: + - persona: + - Sophia + - story: + - Sophia#2_OriginsAndForm + - Sophia#3_Reorchestration +keywords: +- 16th century +- 17th century +- italian music +- composers +- people network +- text analysis +work-package: + - WP5 +pilot: + - MEETUPS +story type: + - expert driven +--- +# Musicians and their environment + +## Persona + +Sophia is a musicologist and a practising musician. + +## Goal + +Sophia is interested in understanding the social-historical reasons behind how the music was created and how it sounds. + +## Scenario + +Sophia is doing social-historical research, using textual sources. One of her current projects is concerned with the relationship between music, medicine and religion at a particular charitable institution in 17th century Italy. The sources she uses, which are also not digitized, include records of an institution, payslips etc. 
+ +## Competency questions + +CQ1. What was the composer’s network (patrons, institutions …)? + + * What is the “intellectual framework” of a musician? which explains why certain music was played and how it was played + * What is the time relationship between different musicians, e.g. who was working at the same time? + * What relationships exist between institutions, e.g. employment of the same musicians; and between people and institutions, e.g. between a number of composers and a printer? + +CQ2. What relationships exist between different sources of information, e.g. payslips and accounts? + + +## Resources + +The resources which Sophia uses are often not digitized. Therefore she needs to visit libraries. Whilst there, she will sift through the appropriate material, and take photographs of the most relevant. She will then analyze these photographs on her return home. In one particular library she may not be able to take photographs, and there she can only take notes. Sophia uses spreadsheets to maintain and compare information. + +Sophia’s work could be helped by: + + * digitalisation of her source material; + * handwriting OCR; + * data visualization, e.g. to illustrate relationships between composers, institutions etc.; + * use of a database, i.e. as upgrade to current use of spreadsheets; + * named-entity recognition diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Sophia_Musicologist/Sophia#2_OriginsAndForm.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Sophia_Musicologist/Sophia#2_OriginsAndForm.md new file mode 100644 index 00000000..18e26f67 --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Sophia_Musicologist/Sophia#2_OriginsAndForm.md @@ -0,0 +1,77 @@ +--- +component-id: Sophia#2_OriginsAndForm +name: "Origins and form" +type: Story +description: Sophia is interested in understanding how the composer was influenced, e.g. by which other composers. 
+related-components: +- persona: + - Sophia +- story: + - Sophia#1_MusiciansAndTheirEnvironment + - Sophia#3_Reorchestration +keywords: +- 16th century +- 17th century +- italian music +- music structure +- language +work-package: +- WP5 +story type: +- expert driven +--- +# Origins and form +
+## Persona +
+Sophia is a musicologist and a practising musician +
+## Goal +
+Sophia is interested in understanding how the composer was influenced, e.g. by which other composers; how the music ‘works’; and why we react to the music in the way we do. She is interested in parallels between music and language. +
+## Scenario +Current scenario: +Sophia is making a detailed analysis of the music itself, e.g. looking at notes in a motif and analysing how the motif changes in the course of the music. This work involves very painstaking manual analysis, in part because the material is not digitized. She is also interested in performance practice, i.e. how the music was played and what conventions were used. +
+Future scenario: +Sophia is interested in understanding the musical compositions of Frescobaldi, how they varied, relations between the music and the vocabulary, and identify similarities and differences to his contemporaries. Sophia is analysing a Frescobaldi composition and notices a particular motif that accompanies a reference to birdsong. She decides to see where else this motif can be found in the compositions of Frescobaldi, the compositions of his contemporaries and also investigate the language accompanying the motif. Sophia specifies a motif as a sequence of notes with a particular pitch and rhythm. She can search for precise matches of this motif across the catalogue. She can manipulate the precision level of the motif and colour code compositions depending on extent to which they match (e.g. a shade of blue is used to flag compositions containing a motif matching 3 of the 4 notes). Sophia can also see summaries of the words associated with the motif. 
+Sophia can use the visualisation to see relationships to other composers in terms of the use of this motif and its variations. She also notices that the motif is sometimes associated with certain words or themes. The system can automatically provide Sophia with statistical analyses as to how the motif and its variations differ across composers and across compositions containing certain vocabulary. Sophia can save and annotate the result to use in her research. + +## Competency questions + +CQ1. What parallels are there between the composition of music and the use of language, e.g. in the use of rhetorical delivery? + + * What rhetorical strategies are used in a piece of music? + +CQ2. Why was a piece of music written to be evocative of something, e.g. the call of a cuckoo? + + * What does music evoke? + +CQ3. What different motifs exist in a piece of music? + +CQ4. What relationships exist in a multi-line piece of music, i.e. over time and between the lines? + +CQ5. In early written music, what is left of the preceding unwritten tradition? + + * What are the sources the musician used? + * What sources, musicians, traditions influenced the musician? + +CQ6. How does the corpus of a composer compare with that of other composers, particularly those writing at the same time? + + * What are the similarities? motifs, harmony, texts + +CQ7. Can we use a statistical comparison, or a particular ‘thumbprint’ in the music, to identify the composer of a piece of music? + +CQ8. When a musical structure, e.g. a motif, is found in two composers, is one copied from the other, or do they come out of a previous tradition? + + +## Resources + +Sophia works with musical scores, written in a notation which is not the same as the modern notation. She has now translated some of the music to modern notation, using [Dorico](https://new.steinberg.net/dorico/?gclid=EAIaIQobChMIr-zyg_iM8AIVAevtCh1MPA4oEAAYASAAEgJBt_D_BwE); this enables her to hear the music. 
+ +Sophia’s work could be helped by: + + * digitalisation of her source material; + * digital analysis techniques, e.g. statistical analysis, waveform analysis and visualization; + * manuscript OCR. diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Sophia_Musicologist/Sophia#3_Reorchestration.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Sophia_Musicologist/Sophia#3_Reorchestration.md new file mode 100644 index 00000000..41fc7e31 --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Sophia_Musicologist/Sophia#3_Reorchestration.md @@ -0,0 +1,59 @@ +--- +component-id: Sophia#3_Reorchestration +name: Reorchestration +type: Story +description: "Sophia needs to study the instrumentation of a musical composition of the 18th century in order to organize a music festival that compares philological and modern performances and that proposes music compositions of that time with a completely new orchestration." +related-components: + - persona: + - Sophia + - story: + - Sophia#1_MusiciansAndTheirEnvironment + - Sophia#2_OriginsAndForm +keywords: +- orchestration +- instrumentation +work-package: + - WP2 +pilot: + - To be completed +story type: + - expert driven +--- +
+# Reorchestration +
+## Persona +Sophia’s original qualifications, and her Ph.D., were in music and she is also a practising musician. She is a musicologist working in the music department of a university. She has a particular interest in late 16th to 18th century music, specifically that of a particular composer living and working in Rome, Wien (Haydn, Mozart, Beethoven) London and Paris. 
+ +## Goal +- Classical music +- Orchestration +- Instrumentation +
+## Scenario +As musician and historian, Sophia needs to study the instrumentation of a musical composition of the 18th century in order to organize a music festival that compares philological and modern performances and that proposes music compositions of that time with a completely new orchestration. To do this, she needs detailed information about musical instruments, ancient and modern: how they were used in the original compositions and in the 18th century, to which instrument family each instrument belongs, in which instrument combinations they have been used in musical compositions from any era. For each instrument Sophia will have to take into account these important properties: the timbre or range of timbres; the range of pitches; the dynamic range. Sophia is also interested to know the instrument's notation. +
+## Competency questions +
+CQ1: What are the instruments that play the musical piece? +
+CQ2: In which group does the instrument belong to? +
+CQ3: In which family does the instrument belong to? +
+CQ4: Which is the timbre of the instrument? +
+CQ5: Which is the range of pitches available on the instrument? +
+CQ6: Which is the dynamic range of the instrument? +
+CQ7: Which is the notation convention of the instrument? +
+CQ8: With which genre is the instrument related to? (To be considered) +
+CQ9: In what genre of music the instrument was used? +
+CQ10: In what combinations of instruments the instrument was used? +
+CQ11: What are the differences between ancient and modern version of the same instrument? 
+ diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Sophia_Musicologist/readme.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Sophia_Musicologist/readme.md new file mode 100644 index 00000000..9a3fef56 --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Sophia_Musicologist/readme.md @@ -0,0 +1,43 @@ +--- +component-id: Sophia +name: "Sophia" +description: "Sophia’s original qualifications (including her Ph.D.) were in music. She is also a practising musician." +long-title: "Sophia" +type: Persona +work-package: + - WP2 + - WP4 + - WP5 +pilot: + - MEETUPS +related-components: + - story: + - Sophia#1_MusiciansAndTheirEnvironment + - Sophia#2_OriginsAndForm + - Sophia#3_Reorchestration +--- + +# Sophia + +This is a description of the persona Sophia. + +## Name +Sophia + +## Age + + +## Occupation + + * Primary role: Musicologist + * Secondary role: + * musician + * historian + +## Knowledge/Skills + +Sophia’s original qualifications, and her Ph.D., were in music and she is also a practising musician. She is a musicologist working in the music department of a university. + +## Interests + +She has a particular interest in late 16th and early 17th century music, specifically that of a particular composer living and working in Rome during that period. 
diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Valeriana_Linguist/Valeriana#1_DiscourseAnalysis.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Valeriana_Linguist/Valeriana#1_DiscourseAnalysis.md new file mode 100644 index 00000000..22bebb91 --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Valeriana_Linguist/Valeriana#1_DiscourseAnalysis.md @@ -0,0 +1,44 @@ +--- +component-id: Valeriana#1_DiscourseAnalysis +name: DiscourseAnalysis +keywords: +- Discourse Analysis +work-package: +- WP2 +- WP4 +pilot: +- MUSICBO +priority: +- Must have +type: Story +related-components: +- persona: + - Valeriana + +--- +# Valeriana#1_DiscourseAnalysis + +## Persona +Valeriana is 38. She is a linguist expert in discourse analysis and terminology. Her terminology area of specialization is music and cultural heritage. + +## Goal +On the occasion of the restoration of the Teatro Comunale in Bologna Valeriana collaborates on the reconstruction of the main events that took place there. In particular, Valeriana had to write a popularized article about the two busts of Wagner and Verdi in the atrium of the theatre, referring to the relationship that existed between the two artists and the possible influence one had on the other. + +## Scenario +In order to realize her goal, Valeriana must reconstruct the event in question through both historical data and the evidences given of it. By historical data is meant the dates and occasions when the two musicians visited the theatre or even the dates and occasions when Wagner and Verdi's operas were performed. In addition, Valeriana must reconstruct existing accounts of the events: music criticism, biographical notes, newspaper reviews. + +## Competency questions + +CQ1: What was Wagner's first performance in Bologna? On what date? + +CQ2: What was Verdi's first performance in Bologna? On what date? 
+ +CQ3: Who were the public figures who participated in event X (or Y)? +
+CQ4: Who were the musicians who participated in event X (or Y)? +
+CQ5: Who talked about event X (or Y)? +
+CQ6: In which works is event X recounted/traced? +
+CQ7: What were the critical reactions to event X (or Y)? Are they positive? diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Valeriana_Linguist/Valeriana#2_Terminology.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Valeriana_Linguist/Valeriana#2_Terminology.md new file mode 100644 index 00000000..b9c2a968 --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Valeriana_Linguist/Valeriana#2_Terminology.md @@ -0,0 +1,50 @@ +--- +component-id: Valeriana#2_Terminology +name: Terminology +keywords: +- Terminology +work-package: +- WP2 +- WP4 +pilot: +- MUSICBO +priority: +- life improver +type: Story +story type: +- expert driven +related-components: +- persona: + - Valeriana +
+--- +# Valeriana#2_Terminology +
+## Persona +Valeriana is 38. She is a linguist expert in discourse analysis and terminology. Her terminology area of specialization is music and cultural heritage. +
+
+## Goal +Valeriana is writing a book on the evolution of singing techniques from a terminological point of view in a period between the 17th and 19th centuries, comparing the main European cultures: France, Italy, Spain, Germany. +
+
+## Scenario +In order to realize her goal, Valeriana has to reconstruct in a diachronic perspective (temporal evolution) and a diatopic perspective (comparison between different places/cultures) the history of the term 'vocality' by identifying the changing meaning of this word. +
+
+## Competency questions +
+CQ1) What are the meanings of the word 'vocality' in Italian (x)? +
+CQ2) What are the meanings of the word 'vocality' in French (y)? +
+CQ3) What are the meanings of the word 'vocality' in German (z)? 
+ +CQ4) What are the meanings of the word 'vocality' in Spanish (h)? + +CQ5)How do the words x and y change over time? + +CQ6)What contexts are the word x, y, z, h related to? + +CQ7) How do these contexts change over time? + diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Valeriana_Linguist/readme.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Valeriana_Linguist/readme.md new file mode 100644 index 00000000..2f86afc4 --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/Valeriana_Linguist/readme.md @@ -0,0 +1,33 @@ +--- +component-id: Valeriana +name: "Valeriana" +description: "Valeriana is a linguist expert in discourse analysis and terminology." +long-title: "Valeriana" +type: Persona +work-package: +- WP4 +related-components: +- story: Valeriana#1_DiscourseAnalysis +--- + +# Persona description + +This is a description of the persona Valeriana. + +## Name +Valeriana + +## Age +38 + +## Occupation +- Primary role + - Linguist, terminologist +- Secondary roles + - Researcher + +## Knowledge/Skills +She is a linguist expert in discourse analysis and terminology. + +## Interests +Her terminology area of specialization is music and cultural heritage. diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/William_Curator_Europeana/William#1EuropeanFolkMusic.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/William_Curator_Europeana/William#1EuropeanFolkMusic.md new file mode 100644 index 00000000..977218d8 --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/William_Curator_Europeana/William#1EuropeanFolkMusic.md @@ -0,0 +1,73 @@ +--- +component-id: William#1_EuropeanFolkMusic +name: "European Folk Music" +type: Story +description: "William is looking for songs matching his search criteria from the 20th and 21st centuries, across various music collections in Europe (particularly folk music)." 
+related-components: + - persona: + - William +keywords: +- Folk music +- European +- Europeana +- Queries +- Visualisations +work-package: + - WP2 +pilot: + - INTERLINK +story type: + - expert driven +--- +
+# European Folk Music +
+## Persona +William is an art historian working as a curator for the Europeana platform. As such he’s aiming to reach a large audience with transnational heritage stories to educate and inspire European citizens. He lacks in-depth musicological knowledge. +
+## Goal +William wants to create online exhibitions that illustrate themes such as: +- the interconnectedness of (folk) songs of the early 20th century across Europe. +- the connections between prominent 20th and 21st century European composers. +
+## Scenario +William is looking for songs matching his search criteria from the 20th and 21st centuries, across various music collections in Europe (particularly folk music). He is interested in descriptions of the songs, descriptions of performances/recordings, sheet music, and actual audio(visual) content. He wants to see patterns in the metadata of these songs, such as developments over time, location or over genres, that can give him clues about connections and influences. He also wants to be able to find interesting connections between composers, compositions and performers across the boundaries of the various archive(s) they are listed in. William wants to use such patterns and connections to provide an overview of the cultural/musical interchange between various European Countries pre- and post-WWII. +
+At present, William must do a lot of laborious manual work, consulting different search portals separately and then attempting to integrate the results himself in documents and spreadsheets. He must often look people up in different sources to find out more about them, to give context to the results he finds. 
He has to cope with different naming conventions and typos in the labels used for compositions, composers and performers in the separate archives, spending a lot of time thinking up possible variations on his search terms. In addition, for each new archive he needs to go in search of documentation that will explain the meaning of the metadata fields and their completeness, often needing to email and ring up the archive owners to find out who has the information he needs. Otherwise he may fail to formulate his query correctly, or misinterpret his search results. + +Thanks to Polifonia, William will be presented with one set of integrated results for songs from across the archives. His results will be enriched with additional information about the persons and compositions found, and accompanied by useful links to related persons and compositions. These links will be visualised in a way that helps him to understand the connections between persons and compositions. Distributions of his set of search results over time, location, and other aspects such as genre will be visualised so that he can spot patterns. He will have easy access to information about the metadata of the collections and statistics on its completeness. + +## Example data + +### Cataloguing information +- Thesauri of composers and performers, (and to a lesser extent events, subjects and locations) represented in ontologies shared by multiple collections. These entities can be used to consistently find archival records with annotations connected to the respective composers and performers of interest. An example of a shared vocabulary in use at NISV and other Dutch cultural heritage organisations is the '[Common Thesaurus Audiovisual Archives (GTAA)](http://labs.beeldengeluid.nl/datasets/gtaa)', containing term categories such as Person names, Classification, Genre, Geographical Names, Names and Subjects. 
Vocabularies typically contain a label and alternative labels, but may also have additional information such as occupation of a person, or the hierarchy between subjects. An example of a composer represented in the GTAA: https://data.beeldengeluid.nl/gtaa/77493 +- Wikidata entities for compositions, composers and performers (and to a lesser extent events, subjects and locations). This data can help to augment collection records with additional information, including biographical information, images and links to significant works and other persons. It can also assist in linking thesauri or expanding search queries with alternative labels to compensate for variations in naming conventions and typos. An example of a Wikidata entity for a composer: https://www.wikidata.org/wiki/Q504743 +- Entries in other databases for compositions, composers and performers. E.g. Discogs. As for Wikidata, this data can help with augmenting records with additional information and linking across archives. An example of a composer in Discogs: https://www.discogs.com/artist/171391-Louis-Andriessen +- Audiovisual archival records (e.g. LPs, concert recordings, TV and Radio programmes) with annotations connecting them to the respective composers and performers of interest. They also contain information about time, location and other aspects such as genre. This information can be aggregated to spot patterns in sets of results and compare them in different collections, e.g. a performer who starts on the radio, appears with increasing frequency and then moves to television. Alternative titles and related titles (e.g. album and track titles) can help in disambiguating and correctly identifying compositions. These records can be accessed e.g. as [Schema.org](https://schema.org/) based Linked Data via a SPARQL endpoint. 
See for example [this query](https://cat.apis.beeldengeluid.nl/sparql#transientDatasources=https%3A%2F%2Fcat.apis.beeldengeluid.nl%2Fsparql&query=PREFIX%20rdf%3A%20%3Chttp%3A%2F%2Fwww.w3.org%2F1999%2F02%2F22-rdf-syntax-ns%23%3E%0APREFIX%20rdfs%3A%20%3Chttp%3A%2F%2Fwww.w3.org%2F2000%2F01%2Frdf-schema%23%3E%0APREFIX%20sdo%3A%20%3Chttps%3A%2F%2Fschema.org%2F%3E%0APREFIX%20skos%3A%20%3Chttp%3A%2F%2Fwww.w3.org%2F2004%2F02%2Fskos%2Fcore%23%3E%0A%0ASELECT%20DISTINCT%20%3FprogramId%20%3Fmain_title%20%3Fgtaa_concept%20%3Fperson_name_pref_label%20%0AWHERE%20%7B%0A%20%20VALUES%20%3Fperson_name_pref_label%20%7B%0A%20%20%20%20'Andriessen%2C%20Louis'%0A%20%20%7D%0A%20%20%0A%20%20%3FprogramId%20a%20sdo%3ACreativeWork%20.%20%0A%0A%20%20%3FprogramId%20(sdo%3Aabout%7Csdo%3Amentions%7Csdo%3Acreator%7Csdo%3Acontributor%7Csdo%3Aactor%7Csdo%3Acrew%7Csdo%3Aperformer)%2F%0A%20%20(sdo%3Aabout%7Csdo%3Amentions%7Csdo%3Acreator%7Csdo%3Acontributor%7Csdo%3Aactor%7Csdo%3Acrew%7Csdo%3Aperformer)%20%3Fgtaa_concept%20.%0A%0A%20%20%3Fgtaa_concept%20skos%3AprefLabel%20%3Fperson_name_pref_label.%0A%20%20%3FprogramId%20sdo%3Aname%20%3Fname%20.%0A%0A%20%20BIND(str(%3Fname)%20as%20%3Fmain_title)%0A%7D%20LIMIT%2020) for collection items related to the composer Louis Andriessen. +- Description of collection metadata. For example [this collection description](https://mediasuitedata.clariah.nl/dataset/audiovisual-collection-daan) contains [a list of metadata fields and their descriptions](https://mediasuitedata.clariah.nl/dataset/7879cf23-3ac7-4f27-9b3a-475b0c3b499f/resource/9f9bdb43-4be6-4019-b845-8b14f884745f/download/fielddescriptions.tsv), + + +## Competency questions +CQ1: In which collections/datasets does song X occur? + +CQ2: In which countries does song X occur? + +CQ3: Which compositions match with William's thematic focus for the exhibition? + +CQ4: Which composers and performers are related to these compositions?' + +CQ5: In which historical documents is composer X mentioned? 
+ +CQ6: Under which titles is this song known? (multi-language) + +CQ7: What are the relations between the relevant countries, compositions, composers and performers in the various collections? + +CQ8: How have the relations between the relevant countries, compositions, composers and performers in the various collections changed over time? + +CQ9: How many search results are there per time period? + +CQ10: How many search results are there per genre? + +CQ11: What is the relevant metadata for music collections and what do the different fields mean? + +CQ12: Inspect the degree to which metadata fields have been filled across the collections, see trends and gaps? diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/William_Curator_Europeana/readme.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/William_Curator_Europeana/readme.md new file mode 100644 index 00000000..2cab469f --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/stories/William_Curator_Europeana/readme.md @@ -0,0 +1,31 @@ +--- +component-id: William +name: "William" +description: "A curator of the Europeana platform, William has a background in art history." +long-title: "William" +type: Persona +work-package: +- WP2 +pilot: +- INTERLINK +related-components: +- story: + - William#1_EuropeanFolkMusic +--- + +# William +This is a description of the persona William. This persona is relevant to the pilot #INTERLINK + +## Name +William + +## Age +32 + +## Occupation +A curator of the Europeana platform + +## Knowledge/Skills +William has a background in art history. He researches specific topics for the ‘music’ collection (https://www.europeana.eu/nl/collections/topic/62-music) in Europeana. 
+ +## Interests diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/textual-corpus-population/README.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/textual-corpus-population/README.md new file mode 100644 index 00000000..c6fa0bd1 --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/textual-corpus-population/README.md @@ -0,0 +1,294 @@ +--- +component-id: textual-corpus-population +name: Textual Corpus Population +description: Polifonia downloader and digitiser of textual data. +type: Repository +release-date: 07-12-2021 +release-number: v0.1 +work-package: WP4 +pilot: + - MusicBo +keywords: + - ocr + - image + - text digitisation + - textual data +changelog: n/a. +licence: MIT +release-link: n/a. +image: n/a. +logo: n/a. +demo: n/a +links: n/a +running-instance: n/a +credits: A. Poltronieri (UniBo), R. Tripodi (UniBo) +related-components: + - File scraper + - Internet Culturale Scraper + - Hemeroteca Digitale Scraper + - DigiPress Scraper + - Ocr script +bibliography: n/a +--- + +# Textual Corpus Population + +This repository contains the code for downloading and digitising documents used as a corpus for the [Polifonia Project](https://polifonia-project.eu/). +The repository contains two main types of code: +* file scrapers that automate the download of big repositories of textual data; +* OCR script for digitising the downloaded files. + +--- +## Running Docker Image + +You can launch the `ocr_script` by launching a Docker container based on the image created using the Dockerfile available in this repository. +To launch the Docker, you will need to: + +Pull the Docker Image: +``` +docker pull andreamust/ocr-app:0.3 +``` +Create a folder containing the files from ocrise and download the `.env` file from this folder. The paths to these two items should be specified in the `docker run` command, and should replace the placeholders `` and ``. 
+ +Run the Docker Container: +``` +docker run --name ocr-script --rm -v :/app/eval_files -it --env-file andreamust/ocr-app:0.3 +``` + +Both the `input folder` and the `.env` file will be Docker Bind Mounts and will hence allow you to store files and change parameters on the go. + +For the parameters accepted in the `.env` file, please refer to the documentation below. +Notice that the `.env` file handles boolean parameters differently, i.e. you only need to enter any character to indicate `True`, and leave the field blank to indicate `False`. +Also, the file does not accept whitespace, and strings must be entered without quotes. + + +## Information on installation and setup +For running all the scripts you need to have Python (3.6+, version 3.9 suggested) and pip3 installed on your machine. +Instructions for installing Python and pip can be found on the [Python download page](https://www.python.org/downloads/). + +Once Python is installed, it is necessary to clone the repository using git (installation information on [this page](https://git-scm.com/book/en/v2/Getting-Started-Installing-Git)): + +``` +git clone https://github.com/polifonia-project/OCR.git +``` + +The libraries needed to execute all scripts can be installed running: +``` +pip install -r requirements.txt +``` + +For running the OCR Script it is required to install [Tesseract](https://github.com/tesseract-ocr/tesseract) and the trained data for the languages you need to work with. +The full documentation for installing Tesseract and its dependencies can be found in [Tesseract official documentation](https://tesseract-ocr.github.io/tessdoc/Installation.html). + +--- +## Usage +Since these two types of scripts can also be used as stand-alone software, they are documented separately, while the setup and the requirement installation are documented for all the scripts contained in the repository. 
+ +--- + +## Internet Culturale scraper +For downloading resources from "Internet Culturale" you need to run the ```internet_culturale_scraper.py``` as: +``` +python3 src/internet_culturale_scraper.py [-h] [--resource_url] [--output_path] +``` + +The parameters to pass are described as follows: +``` +--resource_url (string): the url of a resource page on "Internet Culturale" (e.g. "https://www.internetculturale.it/it/913/emeroteca-digitale-italiana/periodic/testata/8670") + +--output_path (string): the existing path in which to save the downloaded resource +``` + +You can also browse the script's documentation by typing: +``` +python3 src/internet_culturale_scraper.py --help +``` + +The script will download all files related to the given resource to the specified folder. +Depending on the size of the resource and the speed of the connection, the download may take several hours. + +In addition, a log file named ```download_log.txt``` will be generated in the output folder. This file will list: +* the number of files downloaded; +* the number of errors encountered; +* the list of files not downloaded. + +To attempt to download the non-downloaded files again, simply restart the script with the same parameters. + +--- +## Internet Culturale Search Results Scraper +This scraper downloads all the results of a search on the **Internet Culturale**. +To run the script, simply run the ```download_all.py``` script as: +``` +download_all.py [-h] [--search_url] [--output_path] +``` +The parameters to pass are described as follows: +``` +--search_url (string): the url of the results page on "Internet Culturale" (e.g. 
"https://www.internetculturale.it/it/16/search?q=musica&instance=magindice&__meta_typeTipo=testo+a+stampa&__meta_typeLivello=monografia&pag=1") + +--output_path (string): the existing path in which to save the downloaded resources +``` +The ```search_url``` parameter needs to be the url of a result page of a search on Internet Culturale but without the page number (the last digit(s) at the end of the url). + +You can also browse the script's documentation by typing: +``` +python3 src/download_all.py --help +``` + + +--- + +## Hemeroteca Digital scraper + +For downloading resources from "Hemeroteca Digital" you need to run the ```hemeroteca_digital_scraper.py``` as: +``` +python3 src/hemeroteca_digital_scraper.py [-h] [--resource_url] [--output_path] +``` + +The parameters to pass are described as follows: +``` +--resource_url (string): the url of a resource page on "Hemeroteca Digital" (e.g. "http://hemerotecadigital.bne.es/results.vm?q=parent%3A0003894964&s=0&lang=es") +``` +``` +--output_path (string): the existing path in which to save the downloaded resource +``` + +You can also browse the script's documentation by typing: +``` +python3 src/hemeroteca_digital_scraper.py --help +``` + +The resource url must be the url of a specific resource search result of the "Query" section, only searching for resource's "Title", and clicking on "Search among free-access titles", as illustrated in the image: +![](etc/img/hemeroteca_digital.png) +Remember to select **only** one resource at a time. + +--- + +## OCR Script + +For digitising the textual documents you need to run ```python3 src/ocr_script.py```. +The script can perform OCR on multiple documents at a time. In particular, it has been developed to automatically perform document digitisation starting from: +* single image files (in multiple formats); +* single .pdf files; +* folders of images; +* folders of pdfs. 
+ +The script accepts the following parameters in input: +``` +ocr_script.py [-h] [--input_path] [--saved_file_path] [--converted_image_output_path] [--output_format] [--language_mode] + [--single_language] [--multiple_langs] [--gray_scale] [--remove_noise] + [--thresholding] [--dilate] [--erosion] [--edge_detection] [--skew_correction] + [--page_segmentation_mode] [--ocr_engine_mode] + +``` + +The parameters to pass are described as follows: + +``` + --input_path (string): the path of the local file to be digitised or the local folder containing the files to be digitised; +``` +``` + --saved_file_path (string, default ''): specifies the directory in which to save the output .txt file; +``` +``` + --output_format (string, default 'png'): ONLY TO BE USED IF THE INPUT SOURCE IS IN .pdf FORMAT: specifies the format of the conversion from .pdf to image; +``` +``` + --converted_image_output_path (string): ONLY TO BE USED IF THE INPUT SOURCE IS A SINGLE .pdf FORMAT: specifies the directory in which to save the converted images. 
In the case of a folder of pdf files the converted images will be created in folders located inside the specified folder; +``` +``` + --language_mode (string: 'mono' or 'multi'): allows to specify if the content to digitise is in one language or multilingual; +``` +``` + --single_language (string): ONLY TO BE USED IF --language_mode='mono': takes the language parameter as defined in Tesseract documentation; +``` +``` + --multiple_langs (boolean): ONLY TO BE USED IF --language_mode='multi': takes the language parameters as defined in Tesseract documentation, comma separated; +``` +``` + --gray_scale (boolean): enables the gray scale preprocessing; +``` +``` + --remove_noise (boolean): enables the remove noise preprocessing; +``` +``` + --thresholding (boolean): enables the thresholding preprocessing; +``` +``` + --dilate (boolean): enables the dilate preprocessing; +``` +``` + --erosion (boolean): enables the erosion preprocessing; +``` +``` + --edge_detection (boolean): enables the edge detection preprocessing; +``` +``` + --skew_correction (boolean): enables the skew correction preprocessing; +``` +``` + --page_segmentation_mode (integer): allows to specify the Tesseract Page Segmentation Mode (PSM), as defined in Tesseract documentation; +``` +``` + --ocr_engine_mode (integer): allows to specify the Tesseract Ocr Engine Mode (OEM), as defined in Tesseract documentation; +``` + +You can also browse the script's documentation by typing: +``` +python3 src/ocr_script.py --help +``` + +For preprocessing, the script reuses the OpenCV library. You can read the [official documentation](https://opencv.org/) for more information on how the preprocessing algorithms work. + +With regard instead to the parameters defined by Tesseract (e.g. the Page Segmentation Mode and the OCR Engine Mode), it is possible to read a comprehensive guide in the [Tesseract documentation](https://github.com/tesseract-ocr/tessdoc). 
+However, here is a quick guide to the PSM parameters: + +| Parameter | Description | +|-----------|-----------------------------------------------------------------------------------------------| +| 0 | Orientation and script detection (OSD) only. | +| 1 | Automatic page segmentation with OSD. | +| 2 | Automatic page segmentation, but no OSD, or OCR. | +| 3 | Fully automatic page segmentation, but no OSD. (Default) | +| 4 | Assume a single column of text of variable sizes. | +| 5 | Assume a single uniform block of vertically aligned text. | +| 6 | Assume a single uniform block of text. | +| 7 | Treat the image as a single text line. | +| 8 | Treat the image as a single word. | +| 9 | Treat the image as a single word in a circle. | +| 10 | Treat the image as a single character. | +| 11 | Sparse text. Find as much text as possible in no particular order. | +| 12 | Sparse text with OSD. | +| 13 | Raw line. Treat the image as a single text line, bypassing hacks that are Tesseract-specific. | +--- +# Evaluation and Error Analysis +This repository also contains files for the evaluation of some resources downloaded and OCRised using the software contained in this repository. + +These files were produced using [ocreval](https://github.com/eddieantonio/ocreval) and contain all the detailed error and accuracy information about the OCR of some samples of resources compared to manually annotated ground-truth files. + +The evaluation files can be found in the [evaluation](evaluation) folder, and are in turn divided in two more folders: +* the [ground_truth](evaluation/ground_truth) folder contains the manually annotated files for evaluating the automatically generated ones; +* the [accuracy](evaluation/accuracy) folder contains the files generated with ocreval and contain the error analysis logs. 
+ +--- +# License + +MIT License + +Copyright (c) 2021 Andrea Poltronieri, Rocco Tripodi + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. \ No newline at end of file diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/textual-corpus-population/ocreval/CHANGELOG.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/textual-corpus-population/ocreval/CHANGELOG.md new file mode 100644 index 00000000..a01660b6 --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/textual-corpus-population/ocreval/CHANGELOG.md @@ -0,0 +1,66 @@ +# Change Log +All notable changes to this project will be documented in this file. +This project adheres to [Semantic Versioning](http://semver.org/). +This file is inspired by [Keep a `CHANGELOG`](http://keepachangelog.com/). 
+ +## [7.0.1] - 2018-11-21 +### Fixed +- Compile error involving inappropriate use of `ssize_t` + +## [7.0.0] - 2018-11-21 +### Changed +- Changed name from `isri-ocr-evaluation-tools` to `ocreval` ([#21]) + +## [6.1.2] - 2017-01-04 +### Fixed +- Read in UTF-8 characters in `accsum` ([#14]) + +## [6.1.1] - 2016-02-22 +### Fixed +- No longer spuriously abort if inputs are longer than 65,536 characters ([#10]) + +## [6.1.0] - 2016-01-01 +### Added +- `make exports` which outputs shell `export` commands (to avoid global installation) + +### Changed +- More conventional directory layout ([#4]) + +## [6.0.1] - 2016-01-04 +### Fixed +- Bug in implementation of [WB6](http://unicode.org/reports/tr29/#WB6) +- Special case U+0020 SPACE ' ' as a graphic character +- Clang warnings + +## [6.0.0] - 2016-01-04 +### Added +- Word segmentation using [Unicode word boundaries](http://unicode.org/reports/tr29/#Word_Boundaries). + +### Changed +- Start following [SemVer](http://semver.org) properly. +- All input and output is in UTF-8 +- Fixes to handle non-BMP code points (code points beyond U+FFFF) + +### Removed +- `uni2asc` and `asc2uni` (redundant due to change to UTF-8) + +## [5.1.3] - 2015-11-15 +### Changed +- More idiomatic `make` build system + +### Fixed +- Compiles on modern OS X and Ubuntu + +[7.0.1]: https://github.com/eddieantonio/ocreval/compare/v7.0.0...v7.0.1 +[7.0.0]: https://github.com/eddieantonio/ocreval/compare/v6.1.2...v7.0.0 +[6.1.2]: https://github.com/eddieantonio/isri-ocr-evaluation-tools/compare/v6.1.1...v6.1.2 +[6.1.1]: https://github.com/eddieantonio/isri-ocr-evaluation-tools/compare/v6.1.0...v6.1.1 +[6.1.0]: https://github.com/eddieantonio/isri-ocr-evaluation-tools/compare/v6.0.1...v6.1.0 +[6.0.1]: https://github.com/eddieantonio/isri-ocr-evaluation-tools/compare/v6.0.0...v6.0.1 +[6.0.0]: https://github.com/eddieantonio/isri-ocr-evaluation-tools/compare/v5.1.3...v6.0.0 +[5.1.3]: 
https://github.com/eddieantonio/isri-ocr-evaluation-tools/compare/v5.1.0...v5.1.3 + +[#4]: https://github.com/eddieantonio/isri-ocr-evaluation-tools/issues/4 +[#10]: https://github.com/eddieantonio/isri-ocr-evaluation-tools/issues/10 +[#14]: https://github.com/eddieantonio/isri-ocr-evaluation-tools/issues/14 +[#21]: https://github.com/eddieantonio/ocreval/issues/21 diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/textual-corpus-population/ocreval/README.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/textual-corpus-population/ocreval/README.md new file mode 100644 index 00000000..0103e43d --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/textual-corpus-population/ocreval/README.md @@ -0,0 +1,119 @@ +# ocreval + +[![Build Status](https://travis-ci.org/eddieantonio/ocreval.svg?branch=master)](https://travis-ci.org/eddieantonio/ocreval) + +The `ocreval` consist of 17 tools for measuring the +performance of and experimenting with OCR output. See [the user +guide][user-guide] for more information. + +[user-guide]: https://github.com/eddieantonio/ocreval/raw/master/user-guide.pdf + +`ocreval` is a modern port of the [ISRI Analytic Tools for OCR Evaluation][isri], +with UTF-8 support and other improvements. + +[isri]: http://citeseerx.ist.psu.edu/viewdoc/download?doi=10.1.1.216.9427&rep=rep1&type=pdf + +# Install (macOS) + +Using [Homebrew][brew]: + + brew install eddieantonio/eddieantonio/ocreval + +[brew]: http://brew.sh/ + + +Building +======== + +To build the library and all of the programs, ensure that you have all +required [dependencies](#dependencies). + +## Dependencies + +`ocreval` requires [utf8proc](https://github.com/JuliaLang/utf8proc) +to build from source. 
+ +### macOS + +Using [Homebrew][brew]: + + brew install utf8proc + +### Ubuntu/Debian + +You may need to install `make` and a C compiler: + + sudo apt install build-essential + +Then install, `libutf8proc-dev`: + + sudo apt install libutf8proc-dev + +If `libutf8proc-dev` cannot be installed using `apt`, follow +[Other Linux](#other-linux) below + +### Other Linux + +Install `libutf8proc-dev` manually: + + curl -OL https://github.com/JuliaLang/utf8proc/archive/v1.3.1.tar.gz + tar xzf v1.3.1.tar.gz + cd utf8proc-1.3.1/ + make + sudo make install + # Rebuild the shared object cache - needed to load the library + # at runtime + sudo ldconfig + cd - + +## Building the tools + +Once all dependencies are installed, you may compile all of the +utilities using `make`: + + make + +## Installing + +Install to `/usr/local/`: + + sudo make install + +Note: You will not need `sudo` on macOS if you have `brew` installed. + +## Installing "locally" + +This will not copy any files at all, but instead create the appropriate +shell commands to add all executables, man pages, and libraries to +the correct path (replace `~/.bashrc` with your start-up file): + + make exports >> ~/.bashrc + +# Porting Credits + +Ported by Eddie Antonio Santos, 2015, 2016. See `NOTICE` for copyright +information regarding the original code. + +# License + +### ocreval + +Copyright 2015–2018 Eddie Antonio Santos + +### The ISRI Analytic Tools for OCR Evaluation + +Copyright 1996 The Board of Regents of the Nevada System of Higher +Education, on behalf, of the University of Nevada, Las Vegas, +Information Science Research Institute + +Licensed under the Apache License, Version 2.0 (the "License"); you +may not use this file except in compliance with the License. 
You may +obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +implied. See the License for the specific language governing +permissions and limitations under the License. diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/tonalities_pilot/README.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/tonalities_pilot/README.md new file mode 100644 index 00000000..f13a716c --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/tonalities_pilot/README.md @@ -0,0 +1 @@ +# tonalities_pilot diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/tonalities_pilot/Tonalities.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/tonalities_pilot/Tonalities.md new file mode 100644 index 00000000..9bb9249e --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/tonalities_pilot/Tonalities.md @@ -0,0 +1,36 @@ +# modal-tonal-ontology +Ontology dedicated to the modal-tonal organisation of polyphonic works. + +Shield: [![CC BY-NC-SA 4.0][cc-by-nc-sa-shield]][cc-by-nc-sa] + +This work is licensed under a +[Creative Commons Attribution-NonCommercial-ShareAlike 4.0 International License][cc-by-nc-sa]. + +[![CC BY-NC-SA 4.0][cc-by-nc-sa-image]][cc-by-nc-sa] + +[cc-by-nc-sa]: http://creativecommons.org/licenses/by-nc-sa/4.0/ +[cc-by-nc-sa-image]: https://licensebuttons.net/l/by-nc-sa/4.0/88x31.png +[cc-by-nc-sa-shield]: https://img.shields.io/badge/License-CC%20BY--NC--SA%204.0-lightgrey.svg + + +container-id: tonalities +name: Tonalities +description: This container collects the components of the Tonalities pilote as part of Polifonia. 
+type: Pilot +work-package: +- WP1 +- WP2 +- WP3 +- WP5 +- WP6 + +pilot: +- ThePilot +project: polifonia-project +bibliography: + +- funder: + - name: Horizon 2020 Framework Programme + url: https://cordis.europa.eu/programme/id/H2020-EC + grant-agreement: "https://cordis.europa.eu/project/id/101004746" +credits: "This project has received funding from the Lorem Ipsum Funder research and innovation programme under grant agreement 01234556." diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/tonalities_pilot/mockup/changeLog.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/tonalities_pilot/mockup/changeLog.md new file mode 100644 index 00000000..ff900a5e --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/tonalities_pilot/mockup/changeLog.md @@ -0,0 +1,5 @@ +# Change Log +All notable changes to this project will be documented in this file. + +The format is based on [Keep a Changelog](http://keepachangelog.com/) +and this project adheres to [Semantic Versioning](http://semver.org/). 
\ No newline at end of file diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/tonalities_pilot/mockup/mockup.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/tonalities_pilot/mockup/mockup.md new file mode 100644 index 00000000..3a009d14 --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/tonalities_pilot/mockup/mockup.md @@ -0,0 +1,50 @@ +--- +component-id: tonalities-mockup +type: RequirementsCollection +name: Tonalities mockup +description: This is the mockup of Tonalities annotation interface of collaborative analysis +work-package: +- WP1 +- WP5 + +pilot: +- TONALITIES + +project: polifonia-project +resource: https://github.com/polifonia-project/tonalities_pilot/tree/main/mockup +release-date: 2021/01/25 +release-number: 1.1 +release-link: https://github.com/polifonia-project/tonalities_pilot/tree/main/mockup +changelog: https://github.com/polifonia-project/tonalities_pilot/blob/main/mockup/changeLog.md +licence: +- CC-BY_v4 +copyright: "Copyright (c) 2023 The Polifonia Project Contributors" +contributors: +- Thomas Bottini <> +- Adam Filaber <> +- Félix Poullet-Pagès <> +- Marco Gurrieri <> +- Christophe Guillotel-Nothmann <> +- Antoine Lebrun <> + +related-components: +- informed-by: + - https://github.com/polifonia-project/tonalities_pilot/tree/main/validation/interface + +- story: + - Sethus#1_ConflictingTheoreticalInterpretations + - Sethus#2_CreateRelevantCorpus + - Sethus#1_ConflictingAnalyticalAnnotations + +- persona: + - Sethus + +- evaluated-in: + - https://github.com/polifonia-project/tonalities_pilot/tree/main/validation/interface +- serves: + - https://github.com/Amleth/sherlock-tonalities/ +- funder: + - name: Horizon 2020 Framework Programme + url: https://cordis.europa.eu/programme/id/H2020-EC + grant-agreement: "https://cordis.europa.eu/project/id/101004746" +- credits: "This project has received funding from the European Union's Horizon 2020 research and innovation programme under 
grant agreement 01234556."--- diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/tunes-ontology/README.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/tunes-ontology/README.md new file mode 100644 index 00000000..a2951b64 --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/tunes-ontology/README.md @@ -0,0 +1,67 @@ +# Tunes Ontology +A specialisation of [Music Meta](https://github.com/polifonia-project/musicmeta-ontology) for folk music. + +[![DOI](https://zenodo.org/badge/372536364.svg)](https://zenodo.org/badge/latestdoi/372536364) +[![License: CC BY 4.0](https://img.shields.io/badge/License-CC_BY_4.0-lightgrey.svg)](https://creativecommons.org/licenses/by/4.0/) + +> 🔗 Ontology URI: [https://w3id.org/polifonia/ontology/tunes/](https://w3id.org/polifonia/ontology/tunes/) + +The main novelty at the metadata level consists in grouping *tunes* into *tune families* depending on their similarity (an association that is often performed through manual inspection and analysis). Each `tunes:Tune` is seen as a specialisation of a `mm:MusicEntity` that may belong to a `tunes:TuneFamily`, which in turn specialises `core:Collection` (please, note that the term *Collection* is seen from an ontology engineering perspective, as this reuses the Collection ontology design pattern in Music Meta). + +![Tune family](diagrams/tunes_ontology.png) + +As can be seen from the diagram/example below, the membership of a `tunes:Tune` to `tunes:TuneFamily` is described by `core:CollectionMembership`, which provides additional information on the actual strength of such membership. The latter is captured by `core:CollectionMembership`, which is specialised by two entities here: `tunes:WeakFamilyMembership` and `tunes:NeutralFamilyMembership`. + +Another extension that is peculiar to folk music, is the possibility to group tunes' `mm:Lyrics` into a `tunes:LyricsFamily`. This is analogous to `tunes:TuneFamily` as described above. 
The same criteria for expressing membership strength apply as before. As shown in the diagram/example below, a tune and lyrics families are not exclusive. + +## Competency questions addressed + +| **ID** | **Competency Question** | +|--------|--------------------------------------------------------------------------------------------------------------------------------------------| +| CQ1 | Has composition X been identified as variant in a tune family? | +| CQ2 | Which tune family does composition X belong to? | +| CQ3 | Who assigned composition X to tune family Y? | +| CQ4 | With what level of confidence is composition X a variant in tune family Y? | +| CQ5 | What are all compositions in tune family X? | +| CQ6 | What are the similarities / differences of all compositions in tune family X according to measure Y? | +| CQ7 | To what tune families is tune family X related, given similarity measure Y?| + + +## Examples of SPARQL queries addressed + +- Which are the tunes that compose a tune family associated to a composer? +``` +PREFIX tunes: +PREFIX mm: +PREFIX core: +SELECT DISTINCT ?artist ?collection ?musicEntity +WHERE { ?artist mm:isComposerOf ?musicEntity . +?musicEntity core:isMemberOf ?collection . +} +GROUP BY ?artist ?collection +``` + +## Imported ontologies + +### Direct imports +- [Music-Meta Ontology](https://w3id.org/polifonia/ontology/music-meta/) + +### Indirect imports +- [Core Ontology](https://w3id.org/polifonia/ontology/core/) + +## Statistics +We report here useful statistics of the Tunes Ontology Module: + +- number of classes: 95 +- number of object properties: 92 +- number of datatype properties: 17 +- number of logical axioms: 810 + +## License + +This work is licensed under a +[Creative Commons Attribution 4.0 International License][cc-by]. 
+ + +[cc-by]: http://creativecommons.org/licenses/by/4.0/ + diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/tunes-ontology/header.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/tunes-ontology/header.md new file mode 100644 index 00000000..c35ac2ff --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/tunes-ontology/header.md @@ -0,0 +1,46 @@ +--- +component-id: https://w3id.org/polifonia/ontology/tunes/ +type: Ontology +name: Tunes Ontology +description: A specialisation of the Music Meta Ontology for folk music. +image: diagrams/tunes_ontology.png +work-package: +- WP2 +pilot: +- TUNES +project: polifonia-project +resource: ontology/tunes.owl +release-date: 13/04/2023 +release-number: v1.0 +release-link: https://github.com/polifonia-project/ontology-network/releases +doi: 10.5281/zenodo.7919970 +changelog: https://github.com/polifonia-project/ontology-network/releases +licence: +- CC-BY_v4 +copyright: "Copyright (c) 2023 Tunes Ontology Contributors" +contributors: # replace these with the GitHub URL of each contributor +- Jacopo de Berardinis +- Andrea Poltronieri +- Peter van Kranenburg +related-components: +- informed-by: + - polifoniacq-dataset +- reuses: # any reused/imported ontology + - https://w3id.org/polifonia/ontology/core/ +- story: # any related story this ontology addresses + - Mark#1_FolkMusic +- persona: # any persona this ontology addresses + - Mark +--- + + +# Tunes Ontology + +The Tunes module extends and specialises Music Meta for folk music. The main +novelty consists in grouping and describing tunes into “tune families” depending +on their melodic similarity (an association requiring rich provenance description +of the musicological analysis on the source); which also extends to lyrics families. 
+ + +[Link to the website](https://github.com/polifonia-project/tunes-ontology) diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/web_portal/README.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/web_portal/README.md new file mode 100644 index 00000000..36727d92 --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/web_portal/README.md @@ -0,0 +1,32 @@ +--- +component-id: web-portal-prototypes +name: Prototypes of the web portal +description: Documents and materials relevant to the Polifonia Web portal (WP1.T3) +type: Documentation +release-date: 2022-06-20 +release-number: latest +work-package: WP1 +keywords: + - music heritage interfaces + - dashboard + - generous interfaces +licence: CC-BY +release-link: https://github.com/polifonia-project/web_portal/releases/latest +credits: Giulia Manganelli (UNIBO), Marco Grasso (UNIBO), Marilena Daquino (UNIBO) +related-components: + - musoW + - clef + - melody +--- + +# Polifonia web portal + +[![DOI](https://zenodo.org/badge/370595787.svg)](https://zenodo.org/badge/latestdoi/370595787) + +The repository includes graphic prototypes of the Polifonia web portal. It is organised as follows: + + * **analysis** includes annotated competency questions extracted from [Polifonia stories v.1](https://github.com/polifonia-project/stories/releases/tag/v1.0) in the form of csv files, and the exploratory data analysis (Jupyter notebooks) performed to understand the scope of pilots data, the distribution of entities and relations between entities, the information patterns and the cross-connections between Polifonia pilots.
+ * **prototypes** include graphic prototypes of the interfaces of the web portal, including templates for creating data stories - to be produced with [MELODY](https://github.com/polifonia-project/dashboard) + * **questionnaires** include (1) a preliminary survey with bachelor's students to understand habits and expectations of lay people when consuming music heritage websites and (2) the summary of the evaluation of the graphic prototypes, as a result of focus groups with music scholars of the Polifonia consortium + +The web portal is part of [Polifonia](https://polifonia-project.eu/) H2020 project (described in Deliverable 1.9). diff --git a/content/_spice-h2020/ecosystem/content/_polifonia-project/web_portal/analysis/data_layer.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/web_portal/analysis/data_layer.md new file mode 100644 index 00000000..bfeb9cfa --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/web_portal/analysis/data_layer.md @@ -0,0 +1,42 @@ + +# Data layer + +## Checklist +Simplified names of subject entities and properties (use the next table for mapping). + +Add :white_check_mark: if the entity/property is relevant to the pilot.
+ + + +| Subject | Property | ACCESS | BELLS | CHILD | FACETS | INTERLINK | MUSICBO | MEETUPS | ORGANS | TUNES | TONALITIES | +| -------------- | --------------- | ------ | ------ | ------ | ------ | ------ | ------ | ------ | ------ | ------ | ------ | +| Music work | type | | |:white_check_mark: | |:white_check_mark: | | :white_check_mark: | | :white_check_mark: | :white_check_mark: | +| Music work | genre | :white_check_mark: | |:white_check_mark: | | :white_check_mark: | | :white_check_mark: | | :white_check_mark: | :white_check_mark: | +| Music work | title | | |:white_check_mark: | |:white_check_mark: | :white_check_mark: | :white_check_mark: | | :white_check_mark: | :white_check_mark: | +| Music work | composer | | |:white_check_mark: | |:white_check_mark: | | :white_check_mark: | | :white_check_mark: | :white_check_mark: | +| Music work | date | | | :white_check_mark: | | :white_check_mark: | | :white_check_mark: | | :white_check_mark: | :white_check_mark: | +| Music work | source (manuscript) | | :white_check_mark: | | | | |:white_check_mark: | | :white_check_mark: | :white_check_mark: | +| Music work | place of production | | | | | :white_check_mark: | | :white_check_mark: | | :white_check_mark: | | +| Music work | mediums of performance collection | | | :white_check_mark: | | | :white_check_mark: | :white_check_mark: | | | | +| Source (manuscript) | type | | |:white_check_mark: | | | | | | :white_check_mark: | :white_check_mark: | +| Source (manuscript) | title | | |:white_check_mark: | | | | | | :white_check_mark: | :white_check_mark: | +| Source (manuscript) | place of production | | |:white_check_mark: | | | | | | :white_check_mark: | | +| Source (manuscript) | creator | | |:white_check_mark: | | | | | | :white_check_mark: | :white_check_mark: | +| Music feature | parent work | :white_check_mark: | :white_check_mark: |:white_check_mark: | | | |:white_check_mark: | | :white_check_mark: | :white_check_mark: | +| Annotation | related feature | 
:white_check_mark: | | | | | | :white_check_mark: | | | :white_check_mark: | +| Place | name | | | :white_check_mark: | | | |:white_check_mark: | | :white_check_mark: | | +| Instrument | type | :white_check_mark: | :white_check_mark: |:white_check_mark: | | | | | :white_check_mark: | | | +| Instrument | builder | | :white_check_mark: |:white_check_mark: | | | | | :white_check_mark: | | | +| Instrument | builder's place | | :white_check_mark: |:white_check_mark: | | | | | :white_check_mark: | | | +| Instrument | date | | :white_check_mark: | :white_check_mark: | | | | | :white_check_mark: | | | +| Instrument | place of production | | :white_check_mark: | :white_check_mark: | | | | | :white_check_mark: | | | +| Instrument | component | | :white_check_mark: |:white_check_mark: | | | | | :white_check_mark: | | | +| Instrument | material | | :white_check_mark: |:white_check_mark: | | | | | :white_check_mark: | | | +| Musical performance | time | | :white_check_mark: | :white_check_mark: | | | :white_check_mark: | :white_check_mark: | | | | +| Musical performance | place | | :white_check_mark: | :white_check_mark: | | | :white_check_mark: | :white_check_mark: | | | | +| Musical performance | performer | :white_check_mark: | :white_check_mark: |:white_check_mark: | | | :white_check_mark: | :white_check_mark: | | | | +| Musical performance | musical composition | :white_check_mark: | | :white_check_mark: | | | :white_check_mark: | :white_check_mark: | | | | +| Musical performance | mediums of performance collection | | |:white_check_mark: | | | :white_check_mark: | :white_check_mark: | | | | +| Performer | role | | :white_check_mark: |:white_check_mark: | |:white_check_mark: | :white_check_mark: | :white_check_mark: | | | | +| Performer | medium of performance | | :white_check_mark: |:white_check_mark: | | | :white_check_mark: | :white_check_mark: | | | | +| | | | | | | | | | | | | | diff --git 
a/content/_spice-h2020/ecosystem/content/_polifonia-project/web_portal/apis/README.md b/content/_spice-h2020/ecosystem/content/_polifonia-project/web_portal/apis/README.md new file mode 100644 index 00000000..5080691c --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_polifonia-project/web_portal/apis/README.md @@ -0,0 +1,17 @@ +# web portal APIs +Polifonia datasets Web APIs + +## Requirements + +RAMOSE [requirements](https://github.com/opencitations/ramose/blob/master/requirements.txt) + +## Run + +`python3 ramose.py -s -w :` + +where `` is one or more hf files, e.g. `musow.hf` + +## TODO + + * run ramose with web portal; + * use config.json file OR parse folder /apis to check which apis to create diff --git a/content/_spice-h2020/ecosystem/content/_smashub/choco/LICENSE.md b/content/_spice-h2020/ecosystem/content/_smashub/choco/LICENSE.md new file mode 100644 index 00000000..0debbdc7 --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_smashub/choco/LICENSE.md @@ -0,0 +1 @@ +ChoCo follows a dual licence scheme to comply with the data sharing strategies of the original collections. Data and code are released under the [Attribution 4.0 International (CC BY 4.0)](https://creativecommons.org/licenses/by/4.0/), with the exception of data derived from *Chordify Annotator Subjectivity Dataset*, *Mozart Piano Sonata*, and *Jazz Audio-Aligned Harmony* data. The latter are released under the [Attribution-NonCommercial-ShareAlike 4.0 International (CC BY-NC-SA 4.0)](https://creativecommons.org/licenses/by-nc-sa/4.0/). \ No newline at end of file diff --git a/content/_spice-h2020/ecosystem/content/_smashub/choco/README.md b/content/_spice-h2020/ecosystem/content/_smashub/choco/README.md new file mode 100644 index 00000000..4742e576 --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_smashub/choco/README.md @@ -0,0 +1,249 @@ + +

+ +

+ +# ChoCo: the Chord Corpus + +ChoCo provides 20K+ timed chord annotations of scores and tracks, that were integrated, standardised, and semantically enriched from a number of repositories and databases, for a variety of genres and styles (see [overview](#overview)). + +The harmonic annotations in ChoCo are released in 2 different formats: +- As a [JAMS](https://jams.readthedocs.io) dataset, where audio and score annotations are distinguished by the `type` attribute in their `Sandbox`; and temporal/metrical information is expressed in seconds (for audio) and measure:beat (for scores); +- As a [Knowledge Graph](https://en.wikipedia.org/wiki/Knowledge_graph), based on our [JAMS ontology](https://github.com/polifonia-project/jams-ontology) to model music annotations, and on the [Chord](https://motools.sourceforge.net/chord_draft_1/chord.html) and [Roman](https://github.com/polifonia-project/roman-chord-ontology) ontologies to semantically describe chords; a SPARQL endpoint is available at [this link](https://polifonia.disi.unibo.it/choco/sparql). + +

+ +

+ +To achieve consistency across annotations, chords are cast to the following 2 notational families: (i) [Harte](https://ismir2005.ismir.net/proceedings/1080.pdf), generalising Leadsheet-based notations and extensively used in music information retrieval systems; (ii) [Roman numerals](https://en.wikipedia.org/wiki/Roman_numeral_analysis), a well-known notation standard where chords are named according to their degree. In addition, to achieve interoperability, Roman numeral chords are syntactically converted to the Harte notation. This implies that a corresponding Harte annotation is always available for all tracks/pieces in ChoCo. + +The resulting annotations are rich in provenance data, including metadata of the annotated work or track, authors of the annotations, identifiers, and links, etc. We emphasise that the current version of ChoCo only includes high-quality timed chord annotations that were produced by **human** annotators (e.g. music experts, students), or crowdsourced and verified before publication. + +ChoCo also comes with a family of tools for chord parsing and manipulation (*tutorial coming soon!*), together with a data transformation pipeline (a [Smashub](https://smashub.github.io) instance) to include new chord datasets in ChoCo. + +## How to use ChoCo + +### Option 1: using JAMS files + +If you are using ChoCo as a JAMS dataset and you are using Python, you only need to make sure that the `jams` library is installed in your system. +```python +pip install jams +``` +After downloading a [release](https://github.com/smashub/choco/releases) of ChoCo, you can read, manipulate, and edit harmonic annotations via the `jams` library (more info at this [link](https://jams.readthedocs.io/en/stable/)).
+```python +import jams + +# Loading a JAMS file providing chords for "Michelle" by "The Beatles" +audio_jams = jams.load("path_to_choco/jams/isophonics_170.jams") +# Retrieving the first chord annotation (a progression) from the JAMS file +chord_ann = audio_jams.annotations.search(namespace="chord")[0] +# Printing the first 10 chords in the annotation/progression +print(chord_ann.data[:10]) +``` +Which produces the following output. +``` +[Observation(time=0.0, duration=0.421247, value='N', confidence=1.0), + Observation(time=0.421247, duration=0.994128, value='F:min/5', confidence=1.0), + Observation(time=1.415375, duration=0.959432, value='E:aug', confidence=1.0), + Observation(time=2.374807, duration=1.010068, value='F:min7', confidence=1.0), + Observation(time=3.384875, duration=0.986848, value='F:min6/5', confidence=1.0), + Observation(time=4.371723, duration=1.085346, value='C#:maj7/3', confidence=1.0), + Observation(time=5.457069, duration=0.459543, value='Bb:min/5', confidence=1.0), + Observation(time=5.916612, duration=0.521956, value='C#/3', confidence=1.0), + Observation(time=6.438568, duration=2.031476, value='C', confidence=1.0), + Observation(time=8.470045, duration=2.101406, value='F', confidence=1.0)] + ``` + +### Option 2: using the RDF files + +Another option is to work on ChoCo's Knowledge Graph and use the RDF files in the release folder; or simply query our [SPARQL endpoint](https://polifonia.disi.unibo.it/choco/sparql). 
For example, the output of the Python snippet above can be obtained with a SPARQL query to the endpoint (see the query below), which returns [this output](https://polifonia.disi.unibo.it/choco/sparql?query=PREFIX+jams%3A+%3Chttp%3A%2F%2Fw3id.org%2Fpolifonia%2Fontology%2Fjams%2F%3E%0APREFIX+mp%3A++%3Chttp%3A%2F%2Fw3id.org%2Fpolifonia%2Fontology%2Fmusical-performance%2F%3E%0APREFIX+mc%3A++%3Chttp%3A%2F%2Fw3id.org%2Fpolifonia%2Fontology%2Fmusical-composition%2F%3E%0APREFIX+rdfs%3A+%3Chttp%3A%2F%2Fwww.w3.org%2F2000%2F01%2Frdf-schema%23%3E%0A%0ASELECT+DISTINCT+%3FobservationValue+%3FstartTime+%3Fduration%0AWHERE+%7B%0A++%3Frecording+a+mp%3ARecording+%3B%0A++++mc%3AhasTitle+%22Michelle%22+%3B%0A++++jams%3AhasJAMSAnnotation+%3Fannotation+.%0A++%3Fannotation+jams%3AincludesObservation+%3Fobservation+.%0A++%3Fobservation+rdfs%3Alabel+%3FobservationValue+%3B%0A++++jams%3AhasMusicTimeInterval+%5Bjams%3AhasMusicTimeDuration+%5B+jams%3AhasValue+%3Fduration+%5D+%3B%0A++++++jams%3AhasMusicTimeStartIndex+%5B+jams%3AhasMusicTimeIndexComponent+%5B+jams%3AhasValue+%3FstartTime+%5D%5D%0A++++++++++++++++++++++++++++++%5D+.%0A%7D+%0AORDER+BY+(%3FstartTime)%0ALIMIT+10) (the first 10 chords of Michelle, ordered by onset). + +```sparql +PREFIX jams: +PREFIX mp: +PREFIX mc: +PREFIX rdfs: + +SELECT DISTINCT ?observationValue ?startTime ?startTimeType ?duration ?durationType +WHERE { + ?recording a mp:Recording ; + mc:hasTitle "Michelle" ; + jams:hasJAMSAnnotation ?annotation . + ?annotation jams:includesObservation ?observation ; + jams:hasAnnotationType "chord" . + ?observation rdfs:label ?observationValue ; + jams:hasMusicTimeInterval [jams:hasMusicTimeDuration [ jams:hasValue ?duration ; jams:hasValueType ?durationType ] ; + jams:hasMusicTimeStartIndex [ jams:hasMusicTimeIndexComponent [ jams:hasValue ?startTime ; jams:hasValueType ?startTimeType ]]] . 
+} +ORDER BY (?startTime) +LIMIT 10 +``` + +## Overview + +The current version of ChoCo contains 20,080 JAMS files: 2,283 from the audio partitions, and 17,803 collected from symbolic music. +In turn, these JAMS files provide 60263 different annotations: 20,530 chord annotations in the Harte notation, and 20,029 annotations of tonality and modulations -- hence spanning both local and global keys, when available. +Besides the harmonic content, ChoCo also provides 554 structural annotations (structural segmentations related to music form) and 286 beat annotations (temporal onsets of beats) for the audio partitions. + +| **Partition** | **Type** | **Notation** | **Original format** | **Annotations** | **Genres** | **References** | +|----------------------|----------|---------------|---------------------|------------------|------------|:----------------:| +| Isophonics | A | Harte | LAB | 300 | pop, rock | [1] | +| JAAH | A | Harte | JSON | 113 | jazz | [2] | +| Schubert-Winterreise | A, S | Harte | csv | 25 (S), 25*9 (A) | classical | [3] | +| Billboard | A | Harte | LAB, txt | 890 (740) | pop | [4] | +| Chordify | A | Harte | JAMS | 50*4 | pop | [5] | +| Robbie Williams | A | Harte | LAB, txt | 61 | pop | [6] | +| The Real Book | S | Harte | LAB | 2486 | jazz | [7] | +| Uspop 2002 | A | Harte | LAB | 195 | pop | [8] | +| RWC-Pop | A | Harte | LAB | 100 | pop | [9] | +| Weimar Jazz Database | A | Leadsheet | SQL | 456 | jazz | [10] | +| Wikifonia | S | Leadsheet | mxl | 6500+ | various | [11] | +| iReal Pro | S | Leadsheet | iReal | 2000+ | various | [12] | +| Band-in-a-Box | S | Leadsheet | mgu, sku | 5000+ | various | [13] | +| When in Rome | S | Roman | RomanText | 450 | classical | [14] | +| Rock Corpus | S | Roman | har | 200 | rock | [15] | +| Mozart Piano Sonata | S | Roman | DCMLab | 54 (18) | classical | [16] | +| Jazz Corpus | S | Hybrid | txt | 76 | jazz | [17] | +| Nottingham | S | ABC | ABC | 1000+ | folk | [18] | + +The average duration of the annotated 
music pieces is $191.29 \pm 85.04$ seconds for (audio) tracks, and $135.02 \pm 162.27$ measures for symbolic music. +This provides a heterogeneous corpus with a large extent of variability in the duration of pieces, which also confirms the diversity of musical genres in ChoCo. +Additional statistics can be found from [this](https://github.com/smashub/choco/blob/main/notebooks/dataset_stats.ipynb) Jupyter notebook. + + +## Transformation workflow + +

+ +

+ +**Step 1: Jamification** +>🧩 Achieving interoperability among annotation standards. + +Considering the diversity of annotation formats and conventions for data organisation (the way content is scattered across folders, files, database tables, etc.), each chord dataset in ChoCo undergoes a standardisation process finalised to the creation of a JAMS dataset. +This is needed to aggregate all relevant annotations of a piece (chord, keys, etc.) in a single JAMS file, and to extract content metadata from relevant sources. + + +**Step 2: Conversion** +>🔓 Achieving interoperability among chord notations. + +The Chonverter module performs two central tasks to enable the interoperability of datasets at the chord level: (i) casting dataset-specific (often niche) chord notations to their reference notation family (either Leadsheet/Harte, Roman numerals, pitched chords); (ii) conversion to Harte. +This allows processing all chord annotations in ChoCo under the same language. + +**Step 3: Knowledge Graph creation**. +>🔗 Releasing musical knowledge that can be linked to other resources on the Web. + +Finally, two key components of Smashub are used to generate a Musical Knowledge Graph from the standardised and enriched JAMS files: (i) the [JAMS ontology](https://github.com/polifonia-project/jams-ontology), together with namespace-specific ontologies that can semantically describe the actual content of chord progressions, according to ChoCo's notations -- [Harte](https://motools.sourceforge.net/chord_draft_1/chord.html) and [Roman](https://github.com/polifonia-project/roman-chord-ontology); (ii) the `jams2rdf` Python module, that implements the aforementioned process via [SPARQL Anything](https://github.com/SPARQL-Anything/sparql.anything), a state-of-the-art tool for Semantic Web re-engineering. + + +## Install + +### Option 1: Local Install + +If you want to use ChoCo as a Python library in projects, first clone the repository and install the requirements through conda or pip.
This may take a while, as the repository currently contains the original raw partitions for reproducibility. Also, some users encountered naming issues in the Wikifonia partition on Windows systems. If you find any issue in the codebase, please open an issue. +``` +git clone https://github.com/jonnybluesman/choco.git +``` +In your environment, install the requirements through `pip` (in your conda environment). +``` +pip install -r requirements.txt +``` + +### Option 2: Docker Install + +ChoCo can be used through the official Docker image. However, the functionality of the Docker image is currently limited to the creation of a customised dataset. + +To use the image, it is necessary to pull from DockerHub: + +```bash +docker pull andreamust/choco:latest +``` + +To create the bespoke dataset, simply launch a Docker container: + +```bash +docker run -it -v ":/app/data" -e INCLUDE="" -e EXCLUDE="" -e JAMS_VERSION="" -e WORKERS=1 +``` + +The container exposes a bind mount (``) in which the generated dataset and its metadata are saved. The bind mount must be specified using an absolute path on your system. +The other parameters are defined as follows: +- `INCLUDE`: the name of the ChoCo datasets to include in the custom dataset (to be left blank if `EXCLUDE` is specified); +- `EXCLUDE`: the name of the ChoCo datasets to exclude in the custom dataset (to be left blank if `INCLUDE` is specified); +- `JAMS_VERSION`: the type of JAMS files to be added to the custom dataset (either "original" or "converted"); +- `WORKERS`: number of CPU cores to be used in the data processing (default 1). + + +## Contributing + +We are more than happy to extend ChoCo with your annotations/datasets. To contribute, make sure that your workflow is consistent with ChoCo's transformation pipeline and submit a pull request when you are ready. Please send us an email if you have questions about our code of conduct, or if the process for submitting pull requests is unclear.
+ +Our versioning strategy follows a `X.Y.Z` convention where: `Z` is used for minor revisions and improvements; `Y` increments whenever major changes are made (e.g. annotation formats and conventions); `X` is used when new data/collections are made available in ChoCo. + +## Authors and attribution +[![DOI](https://zenodo.org/badge/462698362.svg)](https://zenodo.org/badge/latestdoi/462698362) + +* **Jacopo de Berardinis** - [King's College London](https://jonnybluesman.github.io) +* **Andrea Poltronieri** - [Università degli Studi di Bologna](https://andreapoltronieri.org) +* **Albert Meroño-Peñuela** - [King's College London](https://www.albertmeronyo.org) +* **Valentina Presutti** - [Università degli Studi di Bologna](https://www.unibo.it/sitoweb/valentina.presutti) + +``` +@inproceedings{deberardinis2023choco, + title={ChoCo: a Chord Corpus and a Data Transformation Workflow for Musical Harmony Knowledge Graphs}, + author={de Berardinis, Jacopo and Meroño-Peñuela, Albert and Poltronieri, Andrea and Presutti, Valentina}, + booktitle={Manuscript under review}, + year={2023} +} +``` + +## Acknowledgments + +We thank all the annotators for contributing to the project. This project is an output of [Polifonia](https://polifonia-project.eu), and has received funding from the European Union’s Horizon 2020 research and innovation programme under grant agreement No 101004746. + + + +## License + +ChoCo follows a dual licence scheme to comply with the data sharing strategies of the original collections. Data and code are released under the [Attribution 4.0 International (CC BY 4.0)](https://creativecommons.org/licenses/by/4.0/), with the exception of data derived from *Chordify Annotator Subjectivity Dataset*, *Mozart Piano Sonata*, and *Jazz Audio-Aligned Harmony* data. The latter are released under the [Attribution-NonCommercial-ShareAlike 4.0 International (CC BY-NC-SA 4.0)](https://creativecommons.org/licenses/by-nc-sa/4.0/). 
Please, contact us if you have any doubt or issue concerning our data release strategy. + +--- + +## References + +[1] Mauch, M., Cannam, C., Davies, M., Dixon, S., Harte, C., Kolozali, S., Tidhar, D., Sandler, M.: Omras2 metadata project 2009. In: 12th International Society for Music Information Retrieval Conference. ISMIR + +[2] Eremenko, V., Demirel, E., Bozkurt, B., Serra, X.: Jaah: Audio-aligned jazz harmony dataset (Jun 2018), https://doi.org/10.5281/zenodo.1290 + +[3] Weiß, C., Zalkow, F., Arifi-Müller, V., Müller, M., Koops, H.V., Volk, A., Grohganz, H.G.: Schubert winterreise dataset: A multimodal scenario for music analysis. Journal on Computing and Cultural Heritage (JOCCH) 14(2), 1–18 (2021) + +[4] Burgoyne, J.A., Wild, J., Fujinaga, I.: An expert ground truth set for audio chord recognition and music analysis. In: ISMIR. vol. 11, pp. 633–638 (2011) + +[5] Koops, H.V., de Haas, W.B., Burgoyne, J.A., Bransen, J., Kent-Muller, A., Volk, A.: Annotator subjectivity in harmony annotations of popular music. Journal of New Music Research 48(3), 232–252 (2019), https://doi.org/10.1080/09298215.2019.1613436 + +[6] Di Giorgi, B., Zanoni, M., Sarti, A., Tubaro, S.: Automatic chord recognition based on the probabilistic modeling of diatonic modal harmony. In: nDS’13; Proceedings of the 8th International Workshop on Multidimensional Systems. pp. 1–6. VDE (2013) + +[7] Mauch, M., Dixon, S., Harte, C., et al.: Discovering chord idioms through beatles and real book songs (2007) + +[8] Berenzweig, A., Logan, B., Ellis, D.P., Whitman, B.: A large-scale evaluation of acoustic and subjective music-similarity measures. Computer Music Journal pp. 63–76 (2004) + +[9] Goto, M., Hashiguchi, H., Nishimura, T., Oka, R.: Rwc music database: Popular, classical and jazz music databases. In: Ismir. vol. 2, pp. 287–288 (2002) + +[10] Pfleiderer, M., Frieler, K., Abeßer, J., Zaddach, W.G., Burkhart, B. (eds.): Inside the Jazzomat - New Perspectives for Jazz Research. 
Schott Campus (2017) + +[11] Wikifonia page on Wikipedia (discountined project) https://en.wikipedia.org/wiki/Wikifonia + +[12] iReal Pro public playlists https://www.irealpro.com/main-playlists + +[13] De Haas, W.B., Robine, M., Hanna, P., Veltkamp, R.C., Wiering, F.: Comparing approaches to the similarity of musical chord sequences. In: International Sympo- sium on Computer Music Modeling and Retrieval. pp. 242–258. Springer (2010) + +[14] Micchi, G., Gotham, M., Giraud, M.: Not all roads lead to rome: Pitch represen- tation and model architecture for automatic harmonic analysis. Transactions of the International Society for Music Information Retrieval (TISMIR) 3(1), 42–54 (2020) + +[15] De Clercq, T., Temperley, D.: A corpus analysis of rock harmony. Popular Music 30(1), 47–70 (2011) + +[16] Hentschel, J., Neuwirth, M., Rohrmeier, M.: The annotated mozart sonatas: Score, harmony, and cadence. Transactions of the International Society for Music Infor- mation Retrieval 4(1) (2021) + +[17] Granroth-Wilding, M., Steedman, M.: A robust parser-interpreter for jazz chord sequences. Journal of New Music Research 43(4), 355–374 (2014) + +[18] Nottingham database. https://ifdo.ca/~seymour/nottingham/nottingham.html, accessed: 2022-05-05 + + diff --git a/content/_spice-h2020/ecosystem/content/_smashub/choco/assets/header.md b/content/_spice-h2020/ecosystem/content/_smashub/choco/assets/header.md new file mode 100644 index 00000000..a7ba717d --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_smashub/choco/assets/header.md @@ -0,0 +1,131 @@ +--- +id: ChoCo +Name: "ChoCo: the Chord Corpus" +brief-description: A large dataset for musical harmony knowledge graphs. +type: Dataset, Knowledge Graph +release-date: 13-10-2022 +release-number: v0.1 +release-link: https://github.com/smashub/choco/releases/tag/data-v0.1.0 +work-package: WP2 +pilot: INTERLINK +keywords: + - chords + - harmony +changelog: n/a. 
+licence: + - CC-BY + - CC-BY-NC +image: https://github.com/smashub/choco/raw/main/assets/choco_main.png +logo: https://github.com/smashub/choco/raw/main/assets/choco_logo_a.png +demo: https://projects.dharc.unibo.it/melody/choco/chord_corpus_statistics +links: + - https://github.com/smashub/choco/blob/main/void.ttl + - https://github.com/smashub/choco/blob/main/notebooks/dataset_stats.ipynb +running-instance: https://polifonia.disi.unibo.it/choco/sparql +credits: J. de Berardinis (KCL), A. Poltronieri (UniBo), A. Meroño Peñuela (KCL), V. Presutti (UniBo) +related-components: + - ontologies + - JAMS Ontology + - Roman Chord Ontology + - Chord Ontology +bibliography: + - "de Berardinis, Jacopo; Meroño Peñuela, Albert; Poltronieri, Andrea; Presutti, Valentina. ChoCo: a Chord Corpus and a Data Transformation Workflow for Musical Harmony Knowledge Graphs (manuscript in progress)." + - "de Berardinis, Jacopo; Meroño Peñuela, Albert; Poltronieri, Andrea; Presutti, Valentina. The Music Annotation Pattern. In The 13th Workshop on Ontology Design and Patterns (WOP2022) in conjunction with the International Semantic Web Conference (ISWC)." +--- + +# ChoCo: the Chord Corpus + +
+ +
+ +[ChoCo](https://github.com/smashub/choco/) is a music dataset and a [Knowledge Graph](https://en.wikipedia.org/wiki/Knowledge_graph) providing 20K+ timed chord annotations of scores and tracks. To compile ChoCo, we integrated, standardised, and semantically enriched annotations from a number of repositories and databases, covering a variety of genres and styles (more info [below](#overview)). + +To achieve consistency across annotations, chords are cast to the following 2 notational families: (i) [Harte](https://ismir2005.ismir.net/proceedings/1080.pdf), generalising Leadsheet-based notations and extensively used in music information retrieval systems; (ii) [Roman numerals](https://en.wikipedia.org/wiki/Roman_numeral_analysis), a well-known notation standard where chords are named according to their degree. In addition, to achieve interoperability, Roman numeral chords are syntactically converted to the Harte notation. This implies that a corresponding Harte annotation is always available for all tracks/pieces in ChoCo. + +The resulting annotations are rich in provenance data, including metadata of the annotated work or track, authors of the annotations, identifiers, and links, etc. We emphasise that the current version of ChoCo only includes high-quality timed chord annotations that were produced by **human** annotators (e.g. music experts, students), or crowdsourced and verified before publication. + +
+ +
+ +ChoCo also comes with a family of tools for chord parsing and manipulation (*tutorial coming soon!*), together with a data transformation pipeline (a [Smashub](https://smashub.github.io) instance) to include new chord datasets in ChoCo. + +> ℹ️ For more info, please visit [ChoCo](https://github.com/smashub/choco/) on GitHub! + +The harmonic annotations in ChoCo are released in 2 different formats: JAMS and RDF. + +## ChoCo is a JAMS dataset + +ChoCo is released as a [JAMS](https://jams.readthedocs.io) dataset, where audio and score annotations are distinguished by the `type` attribute in their `Sandbox`; and temporal/metrical information is expressed in seconds (for audio) and measure:beat (for scores); + +## ChoCo is also a Knowledge Graph +ChoCo also comes as a Knowledge Graph, based on our [JAMS ontology](https://github.com/polifonia-project/jams-ontology) to model music annotations, and on the [Chord](https://motools.sourceforge.net/chord_draft_1/chord.html) and [Roman](https://github.com/polifonia-project/roman-chord-ontology) ontologies to semantically describe chords; a SPARQL endpoint is available at [this link](https://polifonia.disi.unibo.it/choco/sparql). + + +## Content + +The current version of ChoCo contains 20,280 JAMS files: 2,283 from the audio partitions, and 17,997 collected from symbolic music. +In turn, these JAMS files provide 42,187 different annotations: 20,924 chord annotations in the Harte notation, and 20,423 annotations of tonality and modulations -- hence spanning both local and global keys, when available. +Besides the harmonic content, ChoCo also provides 554 structural annotations (structural segmentations related to music form) and 286 beat annotations (temporal onsets of beats) for the audio partitions. 
+ +| **Partition** | **Type** | **Notation** | **Original format** | **Annotations** | **Genres** | **References** | +|----------------------|----------|---------------|---------------------|------------------|------------|:----------------:| +| Isophonics | A | Harte | LAB | 300 | pop, rock | [1] | +| JAAH | A | Harte | JSON | 113 | jazz | [2] | +| Schubert-Winterreise | A, S | Harte | csv | 25 (S), 25*9 (A) | classical | [3] | +| Billboard | A | Harte | LAB, txt | 890 (740) | pop | [4] | +| Chordify | A | Harte | JAMS | 50*4 | pop | [5] | +| Robbie Williams | A | Harte | LAB, txt | 61 | pop | [6] | +| The Real Book | S | Harte | LAB | 2486 | jazz | [7] | +| Uspop 2002 | A | Harte | LAB | 195 | pop | [8] | +| RWC-Pop | A | Harte | LAB | 100 | pop | [9] | +| Weimar Jazz Database | A | Leadsheet | SQL | 456 | jazz | [10] | +| Wikifonia | S | Leadsheet | mxl | 6500+ | various | [11] | +| iReal Pro | S | Leadsheet | iReal | 2000+ | various | [12] | +| Band-in-a-Box | S | Leadsheet | mgu, sku | 5000+ | various | [13] | +| When in Rome | S | Roman | RomanText | 450 | classical | [14] | +| Rock Corpus | S | Roman | har | 200 | rock | [15] | +| Mozart Piano Sonata | S | Roman | DCMLab | 54 (18) | classical | [16] | +| Jazz Corpus | S | Hybrid | txt | 76 | jazz | [17] | +| Nottingham | S | ABC | ABC | 1000+ | folk | [18] | + +
+ +## References + +[1] Mauch, M., Cannam, C., Davies, M., Dixon, S., Harte, C., Kolozali, S., Tidhar, D., Sandler, M.: Omras2 metadata project 2009. In: 12th International Society for Music Information Retrieval Conference. ISMIR + +[2] Eremenko, V., Demirel, E., Bozkurt, B., Serra, X.: Jaah: Audio-aligned jazz harmony dataset (Jun 2018), https://doi.org/10.5281/zenodo.1290 + +[3] Weiß, C., Zalkow, F., Arifi-Müller, V., Müller, M., Koops, H.V., Volk, A., Grohganz, H.G.: Schubert winterreise dataset: A multimodal scenario for music analysis. Journal on Computing and Cultural Heritage (JOCCH) 14(2), 1–18 (2021) + +[4] Burgoyne, J.A., Wild, J., Fujinaga, I.: An expert ground truth set for audio chord recognition and music analysis. In: ISMIR. vol. 11, pp. 633–638 (2011) + +[5] Koops, H.V., de Haas, W.B., Burgoyne, J.A., Bransen, J., Kent-Muller, A., Volk, A.: Annotator subjectivity in harmony annotations of popular music. Journal of New Music Research 48(3), 232–252 (2019), https://doi.org/10.1080/09298215.2019.1613436 + +[6] Di Giorgi, B., Zanoni, M., Sarti, A., Tubaro, S.: Automatic chord recognition based on the probabilistic modeling of diatonic modal harmony. In: nDS’13; Proceedings of the 8th International Workshop on Multidimensional Systems. pp. 1–6. VDE (2013) + +[7] Mauch, M., Dixon, S., Harte, C., et al.: Discovering chord idioms through beatles and real book songs (2007) + +[8] Berenzweig, A., Logan, B., Ellis, D.P., Whitman, B.: A large-scale evaluation of acoustic and subjective music-similarity measures. Computer Music Journal pp. 63–76 (2004) + +[9] Goto, M., Hashiguchi, H., Nishimura, T., Oka, R.: Rwc music database: Popular, classical and jazz music databases. In: Ismir. vol. 2, pp. 287–288 (2002) + +[10] Pfleiderer, M., Frieler, K., Abeßer, J., Zaddach, W.G., Burkhart, B. (eds.): Inside the Jazzomat - New Perspectives for Jazz Research. 
Schott Campus (2017) + +[11] Wikifonia page on Wikipedia (discontinued project) https://en.wikipedia.org/wiki/Wikifonia + +[12] iReal Pro public playlists https://www.irealpro.com/main-playlists + +[13] De Haas, W.B., Robine, M., Hanna, P., Veltkamp, R.C., Wiering, F.: Comparing approaches to the similarity of musical chord sequences. In: International Symposium on Computer Music Modeling and Retrieval. pp. 242–258. Springer (2010) + +[14] Micchi, G., Gotham, M., Giraud, M.: Not all roads lead to rome: Pitch representation and model architecture for automatic harmonic analysis. Transactions of the International Society for Music Information Retrieval (TISMIR) 3(1), 42–54 (2020) + +[15] De Clercq, T., Temperley, D.: A corpus analysis of rock harmony. Popular Music 30(1), 47–70 (2011) + +[16] Hentschel, J., Neuwirth, M., Rohrmeier, M.: The annotated mozart sonatas: Score, harmony, and cadence. Transactions of the International Society for Music Information Retrieval 4(1) (2021) + +[17] Granroth-Wilding, M., Steedman, M.: A robust parser-interpreter for jazz chord sequences. Journal of New Music Research 43(4), 355–374 (2014) + +[18] Nottingham database. https://ifdo.ca/~seymour/nottingham/nottingham.html, accessed: 2022-05-05 \ No newline at end of file diff --git a/content/_spice-h2020/ecosystem/content/_smashub/choco/partitions/biab-internet-corpus/raw/README.md b/content/_spice-h2020/ecosystem/content/_smashub/choco/partitions/biab-internet-corpus/raw/README.md new file mode 100644 index 00000000..30f7bdcb --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_smashub/choco/partitions/biab-internet-corpus/raw/README.md @@ -0,0 +1 @@ +This folder should contain the original Band-in-a-Box files from the BiaB Internet Corpus created by Bas de Haas. However, these files cannot be re-distributed here due to copyright and licensing. 
If you would like to replicate the extraction of chords from the BiaB partition, as a step for the creation of ChoCo, please contact Bas de Haas to get access to the original data. diff --git a/content/_spice-h2020/ecosystem/content/_smashub/choco/partitions/chordify/readme.md b/content/_spice-h2020/ecosystem/content/_smashub/choco/partitions/chordify/readme.md new file mode 100644 index 00000000..535fdbe3 --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_smashub/choco/partitions/chordify/readme.md @@ -0,0 +1,142 @@ +![](img/Chordify.png)![](img/uu.png) + +# Chordify Annotator Subjectivity Dataset + +Reference annotation datasets containing single harmony annotations are at the core of a wide range of studies in Music Information Retrieval and related fields. However, a lot of properties of music are subjective, and **annotator subjectivity** found among multiple reference annotations is (usually) not taken into account. + +Currently available chord-label annotation datasets containing more than one reference annotation are limited by size, sampling strategy, or lack of a standardized encoding. + +Therefore, to advance research into **annotator subjectivity** and computational harmony (such as Automatic Chord Estimation), we release the *Chordify Annotator Subjectivity Dataset (CASD)*, containing **multiple** expert reference annotations. 
+ +## Overview + +This repository releases the **Chordify Annotator Subjectivity Dataset**, containing reference annotations for: + +* **Fifty songs** from the [Billboard](http://ddmal.music.mcgill.ca/research/billboard) dataset [1] that + * have a stable on-line presence in widely accessible music repositories + * can be compared against the *Billboard* annotations +* Each song is annotated by **four expert annotators** + * The annotations are encoded in [JAMS](https://github.com/marl/jams) format [2] + * Chord labels are encoded in standard [Harte et al.](#references) syntax [3] + * Annotations include *reported difficulty* (on a 5 point Likert scale, where 1 is easy and 5 is hard) and *annotation time* (in minutes) for each annotator + +### How to access the annotations + +(pip) install the [JAMS](https://github.com/marl/jams) python module to read the annotations. To work with the annotations, load an annotation file: + +``` +jam = jams.load('12.jams') +``` + +To access the annotations from the first annotator: + +``` +jam['annotations'][0]['data'] +``` + +For further details on how to manipulate and work with JAMS files, we refer to the [JAMS documentation](http://pythonhosted.org/jams/index.html). + +## Annotator Subjectivity + +We find that within the CASD, annotators disagree about chord labels. The next figure gives a nice intuitive idea of the disagreement. + +![](img/92_chroma.png) + +This figure shows the [*chromagram*](https://en.wikipedia.org/wiki/Chroma_feature) of the annotators for song 92 in the dataset. The horizontal axis represents time, the vertical axis represents the 12 pitch classes of a single octave. The figure shows that the annotators differ in level of detail in time, as well as in pitch classes per chord. This figure was generated with [this script](misc/plot_chromas.py). 
+ +### Research on Annotator Subjectivity + +If you are interested in a detailed analysis of the annotator subjectivity found in the CASD, please refer to our publication in the *Journal of New Music Research*: + +Hendrik Vincent Koops, W. Bas de Haas, John Ashley Burgoyne, Jeroen Bransen, Anna Kent-Muller & Anja Volk (2019) [Annotator subjectivity in harmony annotations of popular music](https://www.tandfonline.com/doi/full/10.1080/09298215.2019.1613436), *Journal of New Music Research*, 48:3, 232-252, DOI: 10.1080/09298215.2019.1613436 + +```tex +@article{doi:10.1080/09298215.2019.1613436, +author = {Hendrik Vincent Koops and W. Bas de Haas and John Ashley Burgoyne and Jeroen Bransen and Anna Kent-Muller and Anja Volk}, +title = {Annotator subjectivity in harmony annotations of popular music}, +journal = {Journal of New Music Research}, +volume = {48}, +number = {3}, +pages = {232-252}, +year = {2019}, +publisher = {Routledge}, +doi = {10.1080/09298215.2019.1613436}, +URL = {https://doi.org/10.1080/09298215.2019.1613436}, +eprint = {https://doi.org/10.1080/09298215.2019.1613436} +} +``` + +Please cite this publication if you use the CASD in your research. + +## Contributing + +By way of this repository and JAMS, we encourage the Music Information Retrieval community to exchange, update, and expand the dataset. + +We are more than happy to add your annotations to this dataset. If you are interested in contributing, please keep in mind how these annotations were obtained (see: **Data collection method** below). Using the same data collection methods ensures keeping all the annotations in the dataset uniform and comparable. + +To contribute, submit a pull request. Please send us an email for questions if you have questions on our code of conduct, of if the process for submitting pull requests is unclear. + +## Data collection method + +To ensure the annotators were all focused on the same task, we provided them +with a guideline for the annotating process. 
We asked them to listen to the +songs as if they wanted to play the song on their instrument in a band, and to +transcribe the chords with this purpose in mind. They were instructed to +assume that the band would have a rhythm section (drum and bass) and melody +instrument (e.g., a singer). Therefore, their goal was to transcribe the +complete harmony of the song in a way that, in their view, best matched their +instrument. + +We used a web interface to provide the annotators with a central, unified +transcription method. This interface provided the annotators with a grid of +beat-aligned elements, which we manually verified for correctness. Chord +labels could be chosen for each beat. The standard YouTube web player was used +to provide the reference recording of the song. Through the interface, the +annotators were free to select any chord of their choice for each beat. While +transcribing, the annotators were able to watch and listen not only to the +YouTube video of the song, but also a synthesized version of their chord +transcription. + +In addition to providing chords and information about their musical background, +we asked the annotators to provide for each song a difficulty rating on a scale +of 1 (easy) to 5 (hard), the amount of time it took them to annotate the song in +minutes, and any remarks they might have on the transcription process. + +## Further Information + +The *Chordify Annotator Subjectivity Dataset* was introduced at the late breaking session at the [18th International Society for Music Information Retrieval Conference](https://ismir2017.smcnus.org/). For more information about the CASD and annotator subjectivity in this dataset, please find the poster and extended abstract below. 
+ +[![Poster](img/ISMIR2017_LBposter.png)](img/ISMIR2017_LBposter.pdf) +[![Abstract](img/ISMIR2017_LBD.png)](https://ismir2017.smcnus.org/lbds/Koops2017.pdf) + +### Journal paper + +In a paper published in the [*Journal of New Music Research*](https://www.tandfonline.com/doi/full/10.1080/09298215.2019.1613436), we provide background information and a statistical analysis of annotator subjectivity in the CASD: + +[![JNMR](https://www.tandfonline.com/na101/home/literatum/publisher/tandf/journals/content/nnmr20/2019/nnmr20.v048.i02/nnmr20.v048.i02/20190301-01/nnmr20.v048.i02.cover.jpg)](https://www.tandfonline.com/doi/full/10.1080/09298215.2019.1613436) + +## Authors + +* **Hendrik Vincent Koops** - [Utrecht University](https://scholar.google.nl/citations?user=rzqMKygAAAAJ&hl) +* **W. Bas de Haas** - [Chordify](https://chordify.net) +* **Jeroen Bransen** - [Chordify](https://chordify.net) +* **John Ashley Burgoyne** - [University of Amsterdam](http://www.uva.nl/profiel/b/u/j.a.burgoyne/j.a.burgoyne.html) +* **Anja Volk** - [Utrecht University](http://www.staff.science.uu.nl/~fleis102/) + +Questions can be addressed to [casd@chordify.net](mailto:casd@chordify.net). + +## License + +![](https://i.creativecommons.org/l/by-nc-sa/4.0/88x31.png) +This work is licensed under a [Creative Commons Attribution-NonCommercial-ShareAlike 4.0 International License](https://creativecommons.org/licenses/by-nc-sa/4.0/). + +## Acknowledgments + +We thank all annotators for contributing to the project. + +## References +[1] John Ashley Burgoyne, Jonathan Wild, and Ichiro Fujinaga, [*An Expert Ground Truth Set for Audio Chord Recognition and Music Analysis*](http://ismir2011.ismir.net/papers/OS8-1.pdf), in Proceedings of the 12th International Society for Music Information Retrieval Conference, pp. 633-38, 2011 + +[2] Humphrey, Eric J., Justin Salamon, Oriol Nieto, Jon Forsyth, Rachel M. Bittner, and Juan Pablo Bello. 
[*JAMS: A JSON Annotated Music Specification for Reproducible MIR Research.*](http://www.terasoft.com.tw/conf/ismir2014/proceedings/T106_355_Paper.pdf) In Proceedings of the International Society for Music Information Retrieval Conference, pp. 591-596, 2014. + +[3] Harte, C., Sandler, M. B., Abdallah, S. A., & Gómez, E. [*Symbolic Representation of Musical Chords: A Proposed Syntax for Text Annotations.*](http://ismir2005.ismir.net/proceedings/1080.pdf) In Proceedings of the International Society for Music Information Retrieval Conference, pp. 66-71, 2005 diff --git a/content/_spice-h2020/ecosystem/content/_smashub/choco/partitions/mozart-piano-sonatas/raw/harmonies/README.md b/content/_spice-h2020/ecosystem/content/_smashub/choco/partitions/mozart-piano-sonatas/raw/harmonies/README.md new file mode 100644 index 00000000..ebfc6baa --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_smashub/choco/partitions/mozart-piano-sonatas/raw/harmonies/README.md @@ -0,0 +1,37 @@ +## Columns + +The TSVs in this folder each contain all chord labels of one of the movements with their temporal positions. The column `presence` shows with which parameters of `mozart_loader.py` the column is present, where `raw` means that the column is present in the raw data and will be output with `-H`. The raw data contain only the columns `mc, mn, onset, label`. The derived feature columns are available via `mozart_loader.py [-e]` or, with chord tones added for each label, via one of the parameters `mozart_loader.py [-E][-g][-a][-A]`. + +For all features given as Roman numerals (`localkey, pedal, numeral, relativeroot`), the scale degrees `III, VI, VII` depend on the local mode: In a local minor key (`localkey_is_minor == 1`), they are a minor 3rd/6th/7th above the local tonic, in major, a major 3rd/6th/7th. + +The data type `Int64` stands for integer columns containing NULL values. 
+ +| column | type | presence | description | +|--------------------|------------|-----------|------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| **mc** | integer | raw | Measure count, identifier for the measure units in the XML encoding. Always starts with 1 for correspondence to MuseScore's status bar. | +| **mn** | integer | raw | Measure number, continuous count of complete measures as used in printed editions. Starts with 1 except for pieces beginning with a pickup measure, numbered as 0. | +| **playthrough** | integer | u | For the unfolded representations, a running count of complete measures disambiguating repeated MNs. | +| **timesig** | string | e/E/g/a/A | Time signature of the measure in which the label occurs. | +| **beat** | fraction | e/E/g/a/A | On which beat the label occurs, expressed as string. Downbeat positions are just integers (e.g. `'1'`) and upbeat positions have a fraction of the respective beat size attached, for instance, in `2/2` meter, beat `'1.1/2'` is on the second quarter note and beat `'1.1/3'` is on the second quarter triplet note.
The mapping timesig => beat size is `'2/2' => 1/2, '4/4' => 1/4, '2/4' => 1/4, '3/8' => 1/8, '6/8' => 3/8`} | +| **onset** | fraction | raw | Label's temporal position from beat 1 of the measure, expressed in fractions of a whole note (1/4 = quarter note, 1/12 = triplet eigth, etc.) | +| **label** | string | raw | Original chord label as entered by the annotator. | +| **volta** | Int64 | raw **only** | Disambiguates endings: `1` for first endings, `2` for second endings. This column is present only for disambiguation in the raw data because `mozart_loader.py` has been designed to correctly deal with first and second endings by deleting first endings or unfolding repetitions (parameter `-u`). | +| **alt_label** | string | e/E/g/a/A | Alternative annotation as added by the annotator. | +| **globalkey** | string | e/E/g/a/A | Tonality of the piece, expressed as absolute note name, e.g. 'Ab' for A flat major, or `g#` for G sharp minor. | +| **localkey** | string | e/E/g/a/A | Local key expressed as Roman numeral relative to the `globalkey`, e.g. `IV` for the major key on the 4th scale degree or `#iv` for the minor scale on the raised 4th scale degree. | +| **pedal** | string | e/E/g/a/A | If the chord occurs over a pedal note, this pedal note is expressed as a Roman numeral. If the chord tones are being computed (see below), this additional note is not taken into account and would need to be added as bass note. | +| **chord** | string | e/E/g/a/A | This is simply a compact view of the features that define the chord tones, namely `numeral, form, figbass, changes, relativeroot`. | +| **numeral** | string | e/E/g/a/A | Roman numeral defining the chordal root relative to the local key. An uppercase numeral stands for a major chordal third, lowercase for a minor third. If chord tones are being computed, the column `root` expresses the same information as an absolute interval. 
| +| **special** | string | e/E/g/a/A | Labels containing special chord names are being replaced and the special names go in this column. Special names can be `Ger, Fr, It` for the three 'geographical chords', i.e. for the augmented sixth chords. | +| **form** | string | e/E/g/a/A | ``: The chord is either a major or minor triad if `figbass` is one of `, '6', '64'`. Otherwise, it is either a major or a minor chord with a minor seventh.
`o, +`: Diminished or augmented chord. Again, it depend on `figbass` whether it is a triad or a seventh chord.
`%, M`: Half diminished or major seventh chord. For the latter, the chord form depends on the Roman numeral. | +| **figbass** | string | e/E/g/a/A | Figured bass notation of the chord inversion. For triads, this feature can be `, '6', '64'`, for seventh chords `'7', '65', '43', '2'`. This features is decisive for which chord tone is in the bass. | +| **changes** | string | e/E/g/a/A | A string containing all **added** intervals, all **replacing** intervals, and all chord tone **alterations**. All intervals are given as arabic numbers standing for the scale degree found above the `numeral` in the current local scale. E.g., `+6`, over the numeral `V`, would add a major sixth in a local major key, and a minor sixth in a local minor key. A minor sixth added to the dominant in a major key would be `+b6`. **Replacing** intervals and **alterations** are not preceded by `+`. Alterations are changes to the chord tones `3, 5, 8, 10, 12`. All other intervals replace a chord tone. If preceded by `#`, they replace the upper neighbor (e.g. `#2` replace the chordal third. Otherwise, they replace the lower neighbour (e.g. `2` replaces the root, `9` replaces the octave). | +| **relativeroot** | string | e/E/a/A | This feature designates a lower-level key to which the current chord relates. It is expressed relative to the local key. For example, if the current numeral is a `V` and it is a secondary dominant, `relativeroot` is the scale degree that is being tonicized. Column is not present if the chord labels are relative to the global key.| +| **phraseend** | string | e/E/g/a/A | If the chord ends a phrase, this feature is `\\`. | +| **chord_type** | string | e/E/g/a/A | A summary of information that otherwise depends on the three columns `numeral, form, figbass`. 
It can be one of the wide-spread abbreviations for triads: `M, m, o, +` or for seventh chords: `o7, %7, +7` (for diminished, half-diminished and augmented seventh chords), or `Mm7, mm7, MM7, mM7`for all combinations of a major/minor triad with a minor/major seventh. | +| globalkey_is_minor | boolean | e/E/g/a/A | For convenience. `1`: global key is a minor key, `0`: global key is a major key. | +| localkey_is_minor | boolean | e/E/a/A | For convenience. `1`: local key is a minor key, `0`: local key is a major key. Column is not present if the chord labels are relative to the global key.| +| chord_tones | collection | E/a/A | Exactly three tones for triads and four tones for seventh chords. They appear in ascending order, starting from the bass note ('closed form'). Replaced or altered chord tones are taken into account. Tones are expressed as stack-of-fifths intervals where 0 is the local tonic. With parameters `-g` or `-A`, however, 0 is the global tonic. With parameter `-a`, 0 is equal to the absolute pitch of the global key. | +| added_tones | collection | E/a/A | Contains any number of added chord tones. | +| root | integer | E/a/A | Chordal root expressed as stack-of-fifths interval. | +| bass_note | integer | E/a/A | Always the first integer in `chord_tones`. | diff --git a/content/_spice-h2020/ecosystem/content/_smashub/choco/partitions/when-in-rome/raw/OpenScore-LiederCorpus/Boulanger,_Lili/_/Attente/README.md b/content/_spice-h2020/ecosystem/content/_smashub/choco/partitions/when-in-rome/raw/OpenScore-LiederCorpus/Boulanger,_Lili/_/Attente/README.md new file mode 100644 index 00000000..0c1b8b98 --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_smashub/choco/partitions/when-in-rome/raw/OpenScore-LiederCorpus/Boulanger,_Lili/_/Attente/README.md @@ -0,0 +1,19 @@ + +# Attente + +__A [standalone piece](..) by [Lili Boulanger](../..)__ + +Transcribed and maintained by contributors to [OpenScore Lieder]. 
+ +Please visit the [official score page] for more information. + +[official score page]: https://musescore.com/openscore-lieder-corpus/scores/5983850 +[OpenScore Lieder]: https://musescore.com/openscore-lieder-corpus + +## External links + +- [MuseScore] - view and listen to [this score][MuseScore], or download in a variety of formats. +- [IMSLP] - view the [source PDF file(s)][IMSLP] that this score was transcribed from. + +[MuseScore]: https://musescore.com/score/5983850 +[IMSLP]: https://imslp.org/wiki/Special:ReverseLookup/435483 diff --git a/content/_spice-h2020/ecosystem/content/_smashub/harmory/README.md b/content/_spice-h2020/ecosystem/content/_smashub/harmory/README.md new file mode 100644 index 00000000..e15a21da --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_smashub/harmory/README.md @@ -0,0 +1,151 @@ +# Harmory: the Harmonic Memory +Harmory is a Knowledge Graph of interconnected harmonic patterns aimed to support creative applications in a fully transparent, accountable, and musically plausible way. + +

+ +

+ +## Harmory in a nutshell +We leverage the [Tonal Pitch Space](https://www.jstor.org/stable/40285402) - a cognitive model of Western tonal harmony to **project** chord progressions into a musically meaningful space. Then, we use novelty-based methods for structural analysis to **segment** chord sequences into meaningful harmonic structures. The latter are then compared with each other, across all progressions and via harmonic similarity, to reveal common/recurring **harmonic patterns**. + +A KG is created to semantically establish relationships between patterns, based on: (i) *temporal links*, connecting two patterns if they are observed consecutively in the same progression; and (ii) *similarity links* among highly-similar patterns. By traversing the KG, and moving across patterns via temporal and similarity links, new progressions can be created in a combinational settings; but also, unexpected and surprising relationships can be found among pieces and composers of different genre, style, and historical period. This is also enabled by the scale and diversity of Harmory, which is built from [ChoCo](https://github.com/smashub/choco), the largest existing collection of harmonic annotations. + +Currently, Harmory contains ~26K harmonic segments from 1800 harmonic (~10% of ChoCo, corresponding to all the audio partitions). Out of all segments: 13667 (16%) correspond to the same pattern families, 66175 (53%) are pattern-friendly (they share non-trivial similarities with other segments), whereas 8176 (32%) are inherently unique (they are found in other songs). More statistics are available at [this link](https://github.com/smashub/harmory/blob/main/harmory/analysis.ipynb). + +## How to re-create Harmory + +### Step 1: Environment setup +If you are using conda, you can now create a new virtual environment from a terminal session, by running the commands below. 
+``` +conda create -n harmory python=3.9 +conda activate harmory +``` +Now that your environment is created and activated, you can install all required packages by running: +``` +pip install -r requirements.txt +``` +Your environment should now be ready to create Harmory. + +### Step 2: Novelty-based harmonic segmentation +This step will perform harmonic segmentation on each track indexed in `choco_audio.txt` (a selection of ChoCo). The output of the segmentation will be written for each piece in a separate file in `data/structures/v1` in order to facilitate parallel access. Before running this command, please make sure to download a release of ChoCo (from [this link](https://github.com/smashub/choco/releases)), so that you can point to the corresponding JAMS folder (by replacing `../../choco/choco-jams/jams` with your local path). +```bash +python create.py segment ../../choco/choco-jams/jams \ +    --selection ../data/samples/choco_audio.txt \ +    --out_dir ../data/structures/v1 \ +    --n_workers 6 --debug +``` + +### Step 3: Linking harmonic segments via similarity +You can now compute segment-wise similarities to discover harmonic patterns. This is done by running the command below, which will save three important objects: the `similarities.csv` file (containing an edge list of similarity links); an instance of `HarmonicPatternFinder` (to find similarities from given segments/patterns); and a `pattern2id` dictionary mapping pattern IDs to (ChoCo-alike) segment IDs. This provides full traceability of the music material in the memory. + +```bash +python create.py similarities ../data/structures/v1 \ +    --out_dir ../data/similarities/v1 \ +    --n_workers 6 +``` + +### Step 4: Knowledge Graph creation + +The Knowledge Graph is created from the data generated in [Step 2](#step-2--novelty-based-harmonic-segmentation) and [Step 3](#step-3--linking-harmonic-segments-via-similarity). 
+The generation of the KG consists of two main steps: +* **Instantiate Tracks**: tracks are instantiated and added to the knowledge graph, together with their metadata and segments; +* **Instantiate Similarity**: SegmentPatterns are instantiated and added to the knowledge graph and linked to the segments to which they refer to. + +The creation of the KG is based on [RDFLib](https://rdflib.readthedocs.io/en/stable/), which allows the graph to be saved in the most common RDF serialisations. + +```bash +python kg_create.py '../../data/structures/v1/' \ +'../../data/similarities/v1/pattern2id.pkl' \ +'../../data/similarities/similarities.csv' \ +'../../data/knowledge-graph/v1/knowledge_graph.ttl' \ +--serialization turtle \ +--n_workers 6 --verbose +``` + +## Experiments +To validate Harmory, we carried out experiments to test the efficacy of the two central components underpinning its creation: the *harmonic similarity* method, and the *harmonic segmentation*. This section provides instructions to reproduce our experiments. + +### Evaluating our harmonic similarity +Our method for harmonic similarity is compared to other state-of-the-art algorithms on the **cover song detection** +task - a common benchmark for similarity algorithms in the symbolic music domain. + +It is possible to compare performance in the cover song detection task using the following algorithms: +* **Dynamic Time Warping** (DTW) - a well-known algorithm for time series alignment; +* **TPSD** - a method based on the Tonal Pitch Space (TPS) model of Western tonal harmony; +* **SoftDTW** - a variant of DTW that allows for local warping of the time series; +* **LCSS** - a method based on the Longest Common Subsequence (LCS) algorithm. 
+ +It is possible to preprocess the chord data using two different variants of the _TPS_ distance: +* **Profile** - the TPS distance is computed between the chord and the local key of the piece; +* **Offset** - the TPS distance is computed between the chord and its preceding chord. + +Moreover, the timeseries produced using both mode of the _TPS_ distance can be normalised both temporally and in terms +of the values contained in the series. +Finally, you can test different constraint settings for the cover song detection task for the _DTW_, _SoftDTW_ and +_LCSS_ algorithms, namely the **Sakoe Chiba** and the **Itakura** constraints. + +For recreating the experiments, you can run the following command: + +```bash +python cover_detection.py '../../exps/datasets/merge/' \ +'../../exps/results/results.csv' +``` + +You can also run different experiments by passing to the script the `--configuration` argument, which is composed by a +list of tuples parameters comma-separated. Each tuple is consists of a sparate experiment, and it is composed by the +following parameters: +`(, , , , )`. + +### Structural coverage of known patterns +Our harmonic segmentation, we measure the overlap between the resulting structures with a collection of well-known chordal patterns. This exemplifies the hypothesis that a good segmentation would maximise the "reuse" of harmonic patterns - as building blocks that can be found in other pieces. + +First, we perform harmonic segmentation with our method (harmov) on a balanced sample of Harmory (`data/samples/medium-audio.txt`) + +```bash +python create.py segment ../../choco/choco-jams/jams \ + --selection ../data/samples/mixed_audio_med.txt \ + --out_dir ../exps/segmentation/medium-audio/harmov/k%8__p%msaf__pml%24__psig%2 \ + --n_workers 6 --debug +``` + +We do the same using our baselines for time series segmentation: *fast low-cost unipotent semantic segmentation* (FLUSS), and a *static uniform split*. 
The commands below will run a grid search of the former methods, using the parameter space defined in `harmory/baselines.py`. +```bash +python baselines.py ../../choco/choco-jams/jams \ + --baselines fluss_segmentation \ + --selection ../data/samples/mixed_audio_med.txt \ + --out_dir ../exps/segmentation/medium-audio --n_workers 6 + +python baselines.py ../../choco/choco-jams/jams \ + --baselines uniform_split \ + --selection ../data/samples/mixed_audio_med.txt \ + --out_dir ../exps/segmentation/medium-audio --n_workers 6 +``` +Finally, we can evaluate the segmentations by computing the structural coverage of known harmonic patterns (which are stored in `data/known-patterns`). For this, we wrapped all commands in a single script. +``` +bash segval.sh +``` +Results will be saved in the given output folder as a CSV file, where each row contains the structural coverage of a segmentation method, using a specific parameter set, and with respect to all known harmonic patterns of a specific length (ranging from 4 to 10). + + +## Authors and attribution +[![DOI](https://zenodo.org/badge/DOI/10.5281/zenodo.8021211.svg)](https://doi.org/10.5281/zenodo.8021211) + +``` +@inproceedings{de2023harmonic, + title={The Harmonic Memory: a Knowledge Graph of harmonic patterns as a trustworthy framework for computational creativity}, + author={de Berardinis, Jacopo and Mero{\~n}o-Pe{\~n}uela, Albert and Poltronieri, Andrea and Presutti, Valentina}, + booktitle={Proceedings of the ACM Web Conference 2023}, + pages={3873--3882}, + year={2023} +} +``` + +## Acknowledgments + +We thank all the annotators for contributing to the project. This project is an output of [Polifonia](https://polifonia-project.eu), and has received funding from the European Union’s Horizon 2020 research and innovation programme under grant agreement No 101004746. + + + +## License + +The code in this repository is distributed under the MIT license. 
All data follows the dual licensing scheme of [ChoCo](https://github.com/smashub/choco), meaning that CC-BY 4.0 is the reference license, unless data is derived from CASD, JAAH, and Mozart Piano Sonata (CC-BY-NC-SA). diff --git a/content/_spice-h2020/ecosystem/content/_smashub/harmory/assets/header.md b/content/_spice-h2020/ecosystem/content/_smashub/harmory/assets/header.md new file mode 100644 index 00000000..80e80ae5 --- /dev/null +++ b/content/_spice-h2020/ecosystem/content/_smashub/harmory/assets/header.md @@ -0,0 +1,49 @@ +--- +component-id: Harmory +type: KnowledgeGraph +name: "Harmory: the Harmonic Memory" +description: A Knowledge Graph of interconnected harmonic patterns aimed to support computationally creative applications. +image: assets/harmory_wide.png +work-package: +- WP2 +pilot: +- INTERLINK +project: polifonia-project +release-date: 01-02-2023 +release-number: v1.0 +release-link: https://github.com/smashub/harmory +doi: 10.5281/zenodo.8021211 +licence: + - CC-BY_v4 + - CC-BY-NC_v4 +demo: https://github.com/smashub/harmory/blob/main/harmory/analysis.ipynb +changelog: https://github.com/smashub/harmory/releases +copyright: "Copyright (c) 2023 Harmory Contributors" +contributors: # replace these with the GitHub URL of each contributor +- Jacopo de Berardinis +- Andrea Poltronieri +related-components: +- informed-by: + - polifoniacq-dataset +- reuses: # any reused/imported ontology +- ChoCo +- https://w3id.org/polifonia/ontology/core/ +- https://w3id.org/polifonia/ontology/music-meta/ +- https://w3id.org/polifonia/ontology/jams/ +bibliography: +- main-publication: "Jacopo de Berardinis, Albert Meroño Peñuela, Andrea Poltronieri, and Valentina Presutti. The Harmonic Memory: a Knowledge Graph of harmonic patterns as a trustworthy framework for computational creativity. In Proceedings of the ACM Web Conference 2023 (pp. 3873-3882)." 
+--- + +# Harmory: the Harmonic Memory + +Harmory is a Knowledge Graph of interconnected harmonic patterns aimed to support creative applications in a fully transparent, accountable, and musically plausible way. + +![Harmory](assets/harmory_wide.png) + +We leverage the [Tonal Pitch Space](https://www.jstor.org/stable/40285402) - a cognitive model of Western tonal harmony to **project** chord progressions into a musically meaningful space. Then, we use novelty-based methods for structural analysis to **segment** chord sequences into meaningful harmonic structures. The latter are then compared with each other, across all progressions and via harmonic similarity, to reveal common/recurring **harmonic patterns**. + +A KG is created to semantically establish relationships between patterns, based on: (i) *temporal links*, connecting two patterns if they are observed consecutively in the same progression; and (ii) *similarity links* among highly-similar patterns. By traversing the KG, and moving across patterns via temporal and similarity links, new progressions can be created in a combinational setting; but also, unexpected and surprising relationships can be found among pieces and composers of different genre, style, and historical period. This is also enabled by the scale and diversity of Harmory, which is built from [ChoCo](https://github.com/smashub/choco), the largest existing collection of harmonic annotations. + +Currently, Harmory contains ~26K harmonic segments from 1800 harmonic progressions (~10% of ChoCo, corresponding to all the audio partitions). Out of all segments: 13667 (16%) correspond to the same pattern families, 66175 (53%) are pattern-friendly (they share non-trivial similarities with other segments), whereas 8176 (32%) are inherently unique (they are not found in other songs). More statistics are available at [this link](https://github.com/smashub/harmory/blob/main/harmory/analysis.ipynb). 
+ +[More info here](https://github.com/smashub/harmory) diff --git a/content/_spice-h2020/ecosystem/index.md b/content/_spice-h2020/ecosystem/index.md new file mode 100644 index 00000000..179f8ba0 --- /dev/null +++ b/content/_spice-h2020/ecosystem/index.md @@ -0,0 +1,142 @@ +--- +container-id: polifonia-project +type: Project +work-package: +- WP1 +- WP2 +- WP3 +- WP4 +- WP5 +- WP6 +- WP7 +- WP8 +pilot: +- TUNES +- BELLS +- INTERLINK +- MUSICBO +- TONALITIES +- MEETUPS +- CHILD +- ORGANS +- ACCESS +- FACETS +funder: + - name: Horizon 2020 Framework Programme + url: https://cordis.europa.eu/programme/id/H2020-EC + grant-agreement: "https://cordis.europa.eu/project/id/101004746" +credits: "This project has received funding from the European Union’s Horizon 2020 research and innovation programme under grant agreement N. 101004746." +layout: default +title: Home +nav_order: 0 +permalink: / +ecosystem-release: v2.0 +--- + +# Polifonia Ecosystem ({{ page.ecosystem-release }}) +{: .fs-9 } + +Data, software, and documentation of the output of the EU H2020 project [Polifonia]({{ site.main-project-url }}). +{: .fs-6 .fw-300 } + +--- + +The Polifonia Ecosystem is a collection of components for developing intelligent applications leveraging musical cultural heritage, result of the Polifonia Project. +The project aims at realising and deploying an ecosystem of computational methods and tools supporting discovery, extraction, encoding, interlinking, classification, exploration of, and access to, musical heritage knowledge on the Web. + +{% comment %} +Polifonia development methodology is two ways. +It follows a bottom-up approach, ensuring that the development is participatory, agile, feature driven, and collaborative. +However, this is paired with a top-down approach, where a Technical Board interacts with WP leaders to ensure the objectives of the project are fully met. 
+ +The components are both independent – they have some value on their own – and interlinked – they can be used together in order to satisfy specific end-user needs. +Independence is a well known principle of software engineering, which is conceived alongside the one of inter-operability - the ability of a software component to operate with others. +However, the possible connections between ecosystem components don’t necessarily derive from software-to-software relations but involve, for example, users being able to perform a complex task by using multiple tools, whose user interfaces are linked, or enable users to transfer data from one environment to another thanks to the mutual support of shared formats. +The Polifonia Project delivers its results as reusable assets, alongside an extensive metadata set and documentation. This is the Polifonia Ecosystem. +{% endcomment %} +{% assign types = site.documents | map: 'type' | join: ',' | split: ',' | uniq | sort %} + + +{% comment %} +## Summary +
+ +{% assign types_activity = "Container,Project,WorkingGroup,WorkPackage,Task,UseCase,Pilot" | split: "," %} +{% assign ncomponents = site.documents | where_exp: 'item',"types_activity contains item.type" | size %} +{% assign npages = site.pages | where_exp: 'item',"types_activity contains item.type" | size %} +{% assign nactivities = npages | plus: ncomponents %} +{% endcomment %} + + +The ecosystem includes {% assign types_data = "Data,Dataset,Schema,Repository,Registry,Ontology,Corpus,Lexicon,KnowledgeGraph" | split: "," %}{% assign ncomponents = site.documents | where_exp: 'item',"types_data contains item.type" | size %} {{ncomponents}} data, {% assign software_data = "Software,Workflow,API,UserInterface,SofwareLibrary,DockerImageContainer,Notebook,Script,Application,Website,WebApplication,WebService,SPARQLEndpoint,MobileApp,CLITool" | split: "," %} {% assign ncomponents = site.documents | where_exp: 'item',"software_data contains item.type" | size %} {{ncomponents}} tools, {% assign report_data = "Report,RequirementsCollection,Story,Persona,Mockup,Surbey,InPresenceGroup,Documentation,Tutorial,EvaluationReport" | split: "," %}{% assign ncomponents = site.documents | where_exp: 'item',"report_data contains item.type" | size %} and {{ncomponents}} reports. + +Polifonia content is managed on [GitHub](http://github.com/{{ site.github }}). 
+ +{% comment %} +#### All types +{% for type in types %} +{% assign ncomponents = site.documents | where: 'type', type | size %} +{% if ncomponents > 0 %} {{ type }}: {{ ncomponents }} {% endif %} +{% endfor %} + +## List of components +{% for type in types %} +{% if type != "" %} +### {{ type }} ({{ site.documents | where: 'type',type | size }}) + {% assign components = site.documents | where: 'type',type %} + {% for component in components %} +- [{{ component.name }}]({{ component.url | relative_url }}) {% endfor %} +{% endif %} +{% endfor %} +style="width: 100%; height: 100%" +{% endcomment %} diff --git a/content/_spice-h2020/ecosystem/pages/_external-components.md b/content/_spice-h2020/ecosystem/pages/_external-components.md new file mode 100644 index 00000000..1c75e169 --- /dev/null +++ b/content/_spice-h2020/ecosystem/pages/_external-components.md @@ -0,0 +1,17 @@ +--- +layout: default +title: External components +nav_order: 30 +permalink: /external-components.html +--- + +
+

External components

+ +
diff --git a/content/_spice-h2020/ecosystem/pages/_stories.md b/content/_spice-h2020/ecosystem/pages/_stories.md new file mode 100644 index 00000000..204d11eb --- /dev/null +++ b/content/_spice-h2020/ecosystem/pages/_stories.md @@ -0,0 +1,24 @@ +--- +layout: default +title: foo +nav_order: 10 +permalink: /stories.html +--- + +
+

Stories

+{% assign stories_grouped = site.stories | group_by: "persona" %} +{% for group in stories_grouped %} +{% assign personashort = group.name | replace: '["' | replace: '"]'%} +

{{ personashort }}

+{% for item in group.items %} + +{% endfor %} +{% endfor %} +
diff --git a/content/_spice-h2020/ecosystem/pages/_tags.md b/content/_spice-h2020/ecosystem/pages/_tags.md new file mode 100644 index 00000000..7ebb458a --- /dev/null +++ b/content/_spice-h2020/ecosystem/pages/_tags.md @@ -0,0 +1,36 @@ +--- +layout: default +title: Tags (dev) +nav_order: 8 +permalink: /tags.html +--- + +# Tags (dev) + +## Work Packages + +{% for wp in site.pages %} + {% if wp.identifier == "wppage" and wp.name != "index.md" %} +### WP{{ wp.title }} +{% assign wp-posts = site.documents | where: "work-package", wp.work-package %} + + {% endif %} +{% endfor %} + +## Pilots + +{% for pilot in site.pages %} + {% if pilot.identifier == "pilotpage" and pilot.name != "index.md" %} +### {{ pilot.title }} + {% assign pilot-posts = site.documents | where: "pilot", pilot.title %} + + {% endif %} +{% endfor %} diff --git a/content/_spice-h2020/ecosystem/pages/data.md b/content/_spice-h2020/ecosystem/pages/data.md new file mode 100644 index 00000000..ef2147d4 --- /dev/null +++ b/content/_spice-h2020/ecosystem/pages/data.md @@ -0,0 +1,61 @@ +--- +id: data +name: Components of type Data +description: List of components of type data, schemas, ontologies +layout: default +title: Data +nav_order: 12 +permalink: /data.html +--- + +# Data + +This section collects project outputs that are released as *data*. +The ecosystem considers data any digital object that specifies, describes, or represents facts about the project's domain of interest. +These include various types of digital objects such as [datasets](#dataset), [corpora](#corpus), [ontologies](#ontology), or [repositories](#repository). + +
+ + +{% assign types_data = "Data,Dataset,Schema,Repository,Registry,Ontology,Corpus,Lexicon,KnowledgeGraph" | split: "," %} +{% for type in types_data %} +{% if type != "" %} +{% assign components = site.documents | where: 'type',type %} +{% assign numberOf = components | size %} +{% if numberOf > 0 %} +### {{ type }} + +There are {{numberOf}} components of type {{type}}: + {% for component in components %} +- [{% if component.name %}{{ component.name }}{%else%}{{ component.component-id}} {%endif%}]({{ component.url | relative_url }}) {% endfor %} +{% endif %} +{% endif %} +{% endfor %} diff --git a/content/_spice-h2020/ecosystem/pages/licences.md b/content/_spice-h2020/ecosystem/pages/licences.md new file mode 100644 index 00000000..55f20c67 --- /dev/null +++ b/content/_spice-h2020/ecosystem/pages/licences.md @@ -0,0 +1,67 @@ +--- +layout: default +title: Licences +nav_order: 20 +permalink: /licences.html +--- + +# {{ page.title }} + +The Polifonia Ecosystem components, grouped by licences. + + +
+ + + + + +{% assign licences = site.data.licences |sort: 'title' %} + +{% for licence in licences %} + {% assign comps = site.documents | where_exp: 'item',"item.licence contains licence.code" %} + {% assign cnumber = comps | size %} + {% if cnumber > 0 %} +### {{licence.title}} + +Published by {{licence.publisher}} + +Link to legal text: {{licence.title}} + +{{cnumber}} component{% if cnumber > 1%}s are{%else%} is{%endif%} released with this licence: + + {% endif %} +{% endfor %} \ No newline at end of file diff --git a/content/_spice-h2020/ecosystem/pages/personas.md b/content/_spice-h2020/ecosystem/pages/personas.md new file mode 100644 index 00000000..380d9203 --- /dev/null +++ b/content/_spice-h2020/ecosystem/pages/personas.md @@ -0,0 +1,29 @@ +--- +layout: default +title: Requirements +nav_order: 11 +permalink: /personas.html +--- + +# {{ page.title }} + +The Polifonia Ecosystem, from the perspective of the target communities, exemplified by a set of Persona. + +{% assign children_list = site.documents | where: "type", "Persona" %} +{% for child in children_list %} +### {{ child.long-title }} +{{ child.description }} +{% assign stories = site.documents | where: "type", "Story" %} +
    +{% for story in stories %} +{% if story['related-components'] %} +{% assign related = story['related-components'] %} +{% for r in related %} + {% if r.persona contains child.component-id %} +
  • {{ story.name }}
  • + {% endif %} +{% endfor %} +{% endif %} +{% endfor %} +
+{% endfor %} diff --git a/content/_spice-h2020/ecosystem/pages/pilots/access.md b/content/_spice-h2020/ecosystem/pages/pilots/access.md new file mode 100644 index 00000000..5019ec9c --- /dev/null +++ b/content/_spice-h2020/ecosystem/pages/pilots/access.md @@ -0,0 +1,26 @@ +--- +container-id: ACCESS +name: "ACCESS: Perceiving music with your body" +description: Making musical performances accessible to people with disabilities +type: Pilot +work-package: + - WP1 + - WP5 +project: polifonia-project +funder: + - name: Horizon 2020 Framework Programme + url: https://cordis.europa.eu/programme/id/H2020-EC + grant-agreement: "https://cordis.europa.eu/project/id/101004746" +credits: "This project has received funding from the European Union’s Horizon 2020 research and innovation programme under grant agreement N. 101004746." +has-part: [] +# Do not remove the below. +title: "ACCESS" +long-title: "ACCESS: Perceiving music with your body" +pilot: "ACCESS" +parent: "Pilots" +layout: pilot +link: https://polifonia-project.eu/pilots/access/ +--- + +The aim of this use case is to co-design, develop and evaluate wearable haptic technology to enable people who are Deaf or hearing impaired to engage as audience members in live performances. 
+ diff --git a/content/_spice-h2020/ecosystem/pages/pilots/bells.md b/content/_spice-h2020/ecosystem/pages/pilots/bells.md new file mode 100644 index 00000000..bf6fa556 --- /dev/null +++ b/content/_spice-h2020/ecosystem/pages/pilots/bells.md @@ -0,0 +1,24 @@ +--- +container-id: BELLS +name: "BELLS: Preserving historical bells cultural heritage" +description: "Analyse the bell heritage to the wider context of a landscape and cultural heritage" +type: Pilot +work-package: [] +project: polifonia-project +funder: + - name: Horizon 2020 Framework Programme + url: https://cordis.europa.eu/programme/id/H2020-EC + grant-agreement: "https://cordis.europa.eu/project/id/101004746" +credits: "This project has received funding from the European Union’s Horizon 2020 research and innovation programme under grant agreement N. 101004746." +has-part: [] +# Do not remove the below. +title: "BELLS" +long-title: "BELLS: Preserving historical bells cultural heritage" +pilot: "BELLS" +parent: "Pilots" +layout: pilot +link: https://polifonia-project.eu/pilots/bells/ +--- + +Bell structures are widespread both in urban and rural areas. They contribute to the distinctive shape of a landscape, defining its soundscape and playing as markers of daily, festive and ritual times. Bell heritage is complex and fascinating and influences our perception of the places we live daily. Both its tangible and intangible assets, and their dependencies are hardly encoded explicitly: most of this heritage is transmitted orally. This pilot intends to encode this valuable information in a knowledge graph, which will be publicly available and particularly relevant for scholars and cultural institutes. 
+ diff --git a/content/_spice-h2020/ecosystem/pages/pilots/child.md b/content/_spice-h2020/ecosystem/pages/pilots/child.md new file mode 100644 index 00000000..a2948a98 --- /dev/null +++ b/content/_spice-h2020/ecosystem/pages/pilots/child.md @@ -0,0 +1,23 @@ +--- +container-id: CHILD +name: "CHILD: Music experience in childhood" +description: "Exploration of historical experience of music in childhood related to musical scores." +type: Pilot +work-package: [] +project: polifonia-project +funder: + - name: Horizon 2020 Framework Programme + url: https://cordis.europa.eu/programme/id/H2020-EC + grant-agreement: "https://cordis.europa.eu/project/id/101004746" +credits: "This project has received funding from the European Union’s Horizon 2020 research and innovation programme under grant agreement N. 101004746." +has-part: [] +# Do not remove the below. +title: "CHILD" +long-title: "CHILD: Music experience in childhood" +pilot: "CHILD" +parent: "Pilots" +layout: pilot +link: https://polifonia-project.eu/pilots/child/ +--- + +This pilot will build a knowledge graph of the historical experience of music in childhood, using life writing (letters, diaries, memoirs, travel writing) and other historical texts as sources for adult reflections on music heard in childhood, third-party observations on children’s engagement with music, and children’s own first-hand accounts. \ No newline at end of file diff --git a/content/_spice-h2020/ecosystem/pages/pilots/facets.md b/content/_spice-h2020/ecosystem/pages/pilots/facets.md new file mode 100644 index 00000000..c8e90e1e --- /dev/null +++ b/content/_spice-h2020/ecosystem/pages/pilots/facets.md @@ -0,0 +1,23 @@ +--- +container-id: FACETS +name: "FACETS: Exploration and discovery in large collections of music scores" +description: "Exploration and discovery in large collections of music scores through statistical features." 
+type: Pilot +work-package: [] +project: polifonia-project +funder: + - name: Horizon 2020 Framework Programme + url: https://cordis.europa.eu/programme/id/H2020-EC + grant-agreement: "https://cordis.europa.eu/project/id/101004746" +credits: "This project has received funding from the European Union’s Horizon 2020 research and innovation programme under grant agreement N. 101004746." +has-part: [] +# Do not remove the below. +title: "FACETS" +long-title: "FACETS: Exploration and discovery in large collections of music scores" +pilot: "FACETS" +parent: "Pilots" +layout: pilot +link: https://polifonia-project.eu/pilots/facets/ +--- + +Music libraries currently lack well-founded information retrieval tools. While it is relatively easy to find music based on metadata, content-based music retrieval remains a challenge. The Facets pilot aims to tackle this challenge by building a faceted search engine (FSE) for large collections of music documents. \ No newline at end of file diff --git a/content/_spice-h2020/ecosystem/pages/pilots/index.md b/content/_spice-h2020/ecosystem/pages/pilots/index.md new file mode 100644 index 00000000..ee2d9420 --- /dev/null +++ b/content/_spice-h2020/ecosystem/pages/pilots/index.md @@ -0,0 +1,19 @@ +--- +layout: default +title: Pilots +has_children: true +has_toc: false +nav_order: 6 +--- + +# {{ page.title }} + +The Polifonia Ecosystem, from the perspective of the project pilots. 
+ +{% assign children_list = site.pages | where: "parent", page.title %} +{% for child in children_list %} +### {{ child.long-title }} +{{ child.description }} + +{% endfor %} + diff --git a/content/_spice-h2020/ecosystem/pages/pilots/interlink.md b/content/_spice-h2020/ecosystem/pages/pilots/interlink.md new file mode 100644 index 00000000..b4069094 --- /dev/null +++ b/content/_spice-h2020/ecosystem/pages/pilots/interlink.md @@ -0,0 +1,23 @@ +--- +container-id: INTERLINK +name: "INTERLINK: Integrating digital music libraries" +description: "Interlinking of collections in European digital music libraries and audiovisuals archives." +type: Pilot +work-package: [] +project: polifonia-project +funder: + - name: Horizon 2020 Framework Programme + url: https://cordis.europa.eu/programme/id/H2020-EC + grant-agreement: "https://cordis.europa.eu/project/id/101004746" +credits: "This project has received funding from the European Union’s Horizon 2020 research and innovation programme under grant agreement N. 101004746." +has-part: [] +# Do not remove the below. +title: "INTERLINK" +long-title: "INTERLINK: Integrating digital music libraries" +pilot: "INTERLINK" +parent: "Pilots" +layout: pilot +link: "https://polifonia-project.eu/pilots/interlink/" +--- + +In order to answer research questions, musical heritage scholars need to combine diverse datasets ranging from music scores, audiovisual material to metadata. They need to identify similar entities and concepts implicitly present in the data, across different collections in different institutions. 
diff --git a/content/_spice-h2020/ecosystem/pages/pilots/meetups.md b/content/_spice-h2020/ecosystem/pages/pilots/meetups.md new file mode 100644 index 00000000..b453a9fa --- /dev/null +++ b/content/_spice-h2020/ecosystem/pages/pilots/meetups.md @@ -0,0 +1,23 @@ +--- +container-id: MEETUPS +name: "MEETUPS: People and music: exploring their encounters over centuries" +description: "Providing a Web tool that enables the exploration and visualization of encounters between people in the musical world in Europe." +type: Pilot +work-package: [] +project: polifonia-project +funder: + - name: Horizon 2020 Framework Programme + url: https://cordis.europa.eu/programme/id/H2020-EC + grant-agreement: "https://cordis.europa.eu/project/id/101004746" +credits: "This project has received funding from the European Union’s Horizon 2020 research and innovation programme under grant agreement N. 101004746." +has-part: [] +# Do not remove the below. +title: "MEETUPS" +long-title: "MEETUPS: People and music: exploring their encounters over centuries" +pilot: "MEETUPS" +parent: "Pilots" +layout: pilot +link: "https://polifonia-project.eu/pilots/meetups/" +--- + +This pilot focuses on supporting music historians and teachers by providing a Web tool that enables the exploration and visualisation of encounters between people in the musical world in Europe from c.1800 to c.1945, relying on information extracted from public domain books such as biographies, memoirs and travel writing, and open-access databases. diff --git a/content/_spice-h2020/ecosystem/pages/pilots/musicbo.md b/content/_spice-h2020/ecosystem/pages/pilots/musicbo.md new file mode 100644 index 00000000..25cd2c67 --- /dev/null +++ b/content/_spice-h2020/ecosystem/pages/pilots/musicbo.md @@ -0,0 +1,23 @@ +--- +container-id: MUSICBO +name: "MUSICBO: Bologna musical heritage stories from Europe" +description: "Retrace the role of music in the city of Bologna from a socio-cultural perspective." 
+type: Pilot +work-package: [] +project: polifonia-project +funder: + - name: Horizon 2020 Framework Programme + url: https://cordis.europa.eu/programme/id/H2020-EC + grant-agreement: "https://cordis.europa.eu/project/id/101004746" +credits: "This project has received funding from the European Union’s Horizon 2020 research and innovation programme under grant agreement N. 101004746." +has-part: [] +# Do not remove the below. +title: "MUSICBO" +long-title: "MUSICBO: Bologna musical heritage stories from Europe" +pilot: "MUSICBO" +parent: "Pilots" +layout: pilot +link: https://polifonia-project.eu/pilots/musicbo/ +--- + +In 2006 Bologna was declared by Unesco “City of Music”. Despite this, the extraordinary musical heritage of this city is kept in cultural institutions and archives, studied by a few specialized scholars only and unknown to the many. The overall objective of this pilot is to make Bologna’s musical heritage available and accessible to the wide public, including scholars of European music, but also music enthusiasts, students, cultural tourists, creative industries and citizens. diff --git a/content/_spice-h2020/ecosystem/pages/pilots/organs.md b/content/_spice-h2020/ecosystem/pages/pilots/organs.md new file mode 100644 index 00000000..d9722316 --- /dev/null +++ b/content/_spice-h2020/ecosystem/pages/pilots/organs.md @@ -0,0 +1,24 @@ +--- +container-id: ORGANS +name: "ORGANS: The Dutch pipe organ through history." +description: "Reconnect information on the histories and characteristics of historic organs in the Netherlands." +type: Pilot +work-package: [] +project: polifonia-project +funder: + - name: Horizon 2020 Framework Programme + url: https://cordis.europa.eu/programme/id/H2020-EC + grant-agreement: "https://cordis.europa.eu/project/id/101004746" +credits: "This project has received funding from the European Union’s Horizon 2020 research and innovation programme under grant agreement N. 101004746." +has-part: [] +# Do not remove the below. 
+title: "ORGANS" +long-title: "ORGANS: The Dutch pipe organ through history." +pilot: "ORGANS" +parent: "Pilots" +layout: pilot +link: https://polifonia-project.eu/pilots/organs/ +--- + +The history of pipe organs is rich and diverse, and highly interrelated to economic, religious and artistic contexts. Currently, the information about building practices and characteristics of ~2000 Dutch pipe organs is only retrievable by manually paging a 15 volumes (4,500+ pages) encyclopaedia: the Orgelencyclopedie (1997-2010). This pilot will extract a knowledge graph out of the text of the Orgelencyclopedie, which will provide digital (and quick) access to such huge and detailed knowledge, including connection to data about aspects of their wider historic contexts. + diff --git a/content/_spice-h2020/ecosystem/pages/pilots/tonalities.md b/content/_spice-h2020/ecosystem/pages/pilots/tonalities.md new file mode 100644 index 00000000..92076f3c --- /dev/null +++ b/content/_spice-h2020/ecosystem/pages/pilots/tonalities.md @@ -0,0 +1,23 @@ +--- +container-id: TONALITIES +name: "TONALITIES: Influences between music tradition over centuries" +description: "Modal and tonal classification of Western notated music from the Renaissance to the 20th century." +type: Pilot +work-package: [] +project: polifonia-project +funder: + - name: Horizon 2020 Framework Programme + url: https://cordis.europa.eu/programme/id/H2020-EC + grant-agreement: "https://cordis.europa.eu/project/id/101004746" +credits: "This project has received funding from the European Union’s Horizon 2020 research and innovation programme under grant agreement N. 101004746." +has-part: [] +# Do not remove the below. 
+title: "TONALITIES" +long-title: "TONALITIES: Influences between music tradition over centuries" +pilot: "TONALITIES" +parent: "Pilots" +layout: pilot +link: https://polifonia-project.eu/pilots/tonalities/ +--- + +Tonalities is developing tools for the modal-tonal identification, exploration and classification of monophonic and polyphonic notated music from the Renaissance to the 20th century. The pilot has a broader societal and pedagogical dimension: it does not only impact research on the theory and evolution of the musical language but is also relevant for the understanding of music collections by students, performers, and informed music lovers. \ No newline at end of file diff --git a/content/_spice-h2020/ecosystem/pages/pilots/tunes.md b/content/_spice-h2020/ecosystem/pages/pilots/tunes.md new file mode 100644 index 00000000..0aac3876 --- /dev/null +++ b/content/_spice-h2020/ecosystem/pages/pilots/tunes.md @@ -0,0 +1,23 @@ +--- +container-id: TUNES +name: "TUNES: Influences between music tradition over centuries" +description: "Tunes analysis and classification of international origin of Dutch early popular music culture" +type: Pilot +work-package: [] +project: polifonia-project +funder: + - name: Horizon 2020 Framework Programme + url: https://cordis.europa.eu/programme/id/H2020-EC + grant-agreement: "https://cordis.europa.eu/project/id/101004746" +credits: "This project has received funding from the European Union’s Horizon 2020 research and innovation programme under grant agreement N. 101004746." +has-part: [] +# Do not remove the below. +title: "TUNES" +long-title: "TUNES: Influences between music tradition over centuries" +pilot: "TUNES" +parent: "Pilots" +layout: pilot +link: https://polifonia-project.eu/pilots/tunes/ +--- + +The digital music collection of the Meertens Instituut (Amsterdam) includes thousands of melodies from Dutch popular culture, spanning a period of more than five centuries. 
To trace possible international origins of Dutch early popular music culture, this pilot will interlink the entire melody collection of the Meertens Institute with a large number of other European collections. \ No newline at end of file diff --git a/content/_spice-h2020/ecosystem/pages/report.md b/content/_spice-h2020/ecosystem/pages/report.md new file mode 100644 index 00000000..6ea4e1d0 --- /dev/null +++ b/content/_spice-h2020/ecosystem/pages/report.md @@ -0,0 +1,60 @@ +--- +id: report +name: Components of type Report +description: List of components of type report +layout: default +title: Reports +nav_order: 15 +permalink: /report.html +--- + +# Reports + +This section collects project outputs that are released as *report*. +The ecosystem considers reports any digital object that specifies, describes, or represents facts about the project's domain of interest. +Reports differ from *data* as they are mainly directed to human consumption, rather than computational treatment. +Reports include various types of digital objects such as [documentation](#documentation), [tutorial](#tutorial), [requirements collections](#requirementscollection), [stories](#story) or [persona](#persona) specifications. + +
+ + +{% assign report_data = "Report,RequirementsCollection,Story,Persona,Mockup,Surbey,InPresenceGroup,Documentation,Tutorial,EvaluationReport" | split: "," %} +{% for type in report_data %} +{% if type != "" %} +{% assign components = site.documents | where: 'type',type %} +{% assign numberOf = components | size %} +{% if numberOf > 0 %} +### {{ type }} + +There are {{numberOf}} components of type {{type}}: + {% for component in components %} +- [{% if component.name %}{{ component.name }}{%else%}{{ component.component-id}} {%endif%}]({{ component.url | relative_url }}) {% endfor %} +{% endif %} +{% endif %} +{% endfor %} \ No newline at end of file diff --git a/content/_spice-h2020/ecosystem/pages/rulebook.md b/content/_spice-h2020/ecosystem/pages/rulebook.md new file mode 100644 index 00000000..2289a1ad --- /dev/null +++ b/content/_spice-h2020/ecosystem/pages/rulebook.md @@ -0,0 +1,131 @@ +--- +id: rulebook +name: Ecosystem Development Rulebook +brief-description: Guidelines, recommendations, and norms on how to contribute to the Polifonia Ecosystem. +type: Documentation +release-date: TBD +release-number: v0.1-dev +work-package: +- WP1 +licence: Public domain, https://unlicense.org +links: +- https://github.com/polifonia-project/rulebook +credits: +- https://github.com/enridaga +- https://github.com/raphaelfournier +--- +# Rulebook +Guidelines, recommendations, and norms on how to contribute to the Polifonia Ecosystem. + +## Guidelines +### When to create a repository? +Create a GH repository whenever there is an activity which leads to the production of a *component* of the *Polifonia Ecosystem*. + +### Do I really need to create a repository for anything I do? +No. But as soon as the work is discussed or presented in a meeting a repository should be already there, or follow straight after! +A repository with annotated component descriptions (see later) is mandatory for components mentioned in official deliverables. 
+ +### What if a repository already exists somewhere else? +You don’t need to fork the repository in the Polifonia organisation. External components can be described (with annotations) in the repository [external-components](https://github.com/polifonia-project/external-components/). + +### Champion +Each repository must have a champion. Champions need to be annotated in the [CHAMPIONS.md](CHAMPIONS.md) file. + +### Discussion and decisions +Discussions can happen anywhere at any time. However, decisions that impact the development of the component **MUST** be logged within an Issue (a Github issue, example) and motivated. + +If the decision is not being recorded in an Issue, **it never happened**. + +### Tracking changes (commits) +Commit messages are mandatory and must reference at least one Issue. A good commit message is `Added folder XYZ with data from QWE, see also #432` where `#432` is the issue number in the same repository. You can also reference any URL in commit messages, please see GitHub documentation for examples. The more you link, the better. + +Useful readings on best practices: + + - https://gist.github.com/luismts/495d982e8c5b1a0ced4a57cf3d93cf60#file-gitcommitbestpractices-md + - https://medium.com/@danielfeelfine/commit-verbs-101-why-i-like-to-use-this-and-why-you-should-also-like-it-d3ed2689ef70 + +This is a [bad commit message](https://github.com/polifonia-project/rulebook/commit/78fb11bbe0fee670fea70dc3f3cf4bf096ab3513) +This is a [good commit message](https://github.com/polifonia-project/rulebook/commit/60dc07702fd6aaf86b029da0c5f873f77f36313e) + +### Tracking Progress Issue +Progress on the development of each component MUST be reported in the Issues section periodically. +Each repository **SHOULD** have a single **Tracking Progress Issue** for general progress update. +A simple reporting template can be a bullet list in three sections: Progress, Problems, and Perspectives (3P). 
+ +The 3P are: + + - Progress: what concrete work has been done since the last update. + - Problems: anything that is slowing or blocking progress, or it is expected to do so. + - Perspectives: what progress is expected going forward, including plans that have been made to face any of the problems (if any). + +Please note that the Tracking progress issue is only for updates. Detailed, task-based issues should be used for referencing changes (commits) and can be linked in the Tracking Progress Issue. + +Examples: + +- [Tracking progress issue (Rulebook)](https://github.com/polifonia-project/rulebook/issues/7) +- [Tracking progress issue (External Components)](https://github.com/polifonia-project/external-components/issues/1) + +### Naming conventions +Some naming conventions have been discussed, feel free to contribute to the discussion [here](https://github.com/polifonia-project/rulebook/issues/2) + +For repositories + + - Avoid including “Polifonia” in the name (e.g. `ecosystem` rather than `polifonia-ecosystem`) + - Avoid acronyms (`ontology-network` instead of `ON`) + +### Branches +Use branches for managing different versions of the code / components. Avoid creating a branch for each sub-system (e.g. /datasets, /ui, etc.). Instead, create different repositories. + +### Releases +Use Semantic Versioning for release numbers, and follow the GitHub workflow for releasing. + +Register your repository on Zenodo, by activating the related GitHub Action. See [this guide](https://guides.github.com/activities/citable-code/). + +## Contributing to the Ecosystem +### What is a Polifonia Ecosystem *component*? +Basically, anything that is not a research paper, dissemination product (e.g., video presentation of a tool), or deliverable. 
+ +List of component types: + +Documentation: + - Story (strictly from https://github.com/polifonia-project/stories) + - Tutorial + - Documentation + +Executables: + - Application + - Container + - Experiment + - CLI tool + +Reusable software: + - Library + - User Interface + - Service + +Data: + - Registry + - Ontology + - Dataset + - Repository + - Corpus + - Knowledge Graph + +### Polifonia Ecosystem Website +A repository contains the development work for at least 1 component in the **Polifonia Ecosystem**. One markdown text file should expose annotations (metadata) relative to a single component included in the repository. For example, a component-name.md file using the annotation schema of the Polifonia Ecosystem (the file can have any name). A repository can include multiple annotated files, hence expose multiple components. +Those annotations will be used by the [Polifonia Ecosystem website](https://github.com/polifonia-project/ecosystem). +This website will provide a user interface for navigating through the Polifonia Ecosystem (with aggregation pages, tags, etc). +Please note that the Polifonia ecosystem website uses the content of Github repositories as is, hence the need for good quality annotations / documentation. + +### Developing Schema Components Annotations +The annotations should be written at the top of the markdown file, between 2 “---” lines. The markup format is YAML (mostly a “key: value” format, see also example at the top of this file). The schema to follow is [this one](schema.md). Developers can use this service to test the YAML code: https://jsonformatter.org/yaml-validator . 
+ +### Process towards ecosystem releases + +- Champions curate releases with project-specific frequency and rationale +- TB calls for next Ecosystem Release +- Champions reply giving details about version number and expected deadline (if any) +- Champions ensure component metadata is accurate +- Ecosystem Website prepares release candidate +- TB tests and validates Ecosystem Website release candidate +- Ecosystem released diff --git a/content/_spice-h2020/ecosystem/pages/software.md b/content/_spice-h2020/ecosystem/pages/software.md new file mode 100644 index 00000000..ec96e58a --- /dev/null +++ b/content/_spice-h2020/ecosystem/pages/software.md @@ -0,0 +1,68 @@ +--- +id: software +name: Components of type Software or Application +description: List of components of type software or application +layout: default +title: Tools +nav_order: 13 +permalink: /software.html +--- + +# Tools + +This section collects project outputs that are released as *software* or *application*. + +The ecosystem considers software any digital object that is being produced by the project to achieve a certain task computationally. +We distinguish software from applications, where software is used (executed) by a user to perform the task. +These include various types of digital objects such as scripts, software libraries, workflows, or APIs. + +The ecosystem considers applications executable systems and tools produced by the project for the end-user to achieve a certain task computationally. +We distinguish applications from software, where software can be copied, moved, and executed in multiple different applications (each in relation to some usage scenarios). +Applications include various types of artifacts such as Web Applications, Web sites, and Command Line Interface (CLI) tools. + 
+ + + +{% assign software_data = "Software,Workflow,API,UserInterface,SoftwareLibrary,DockerImageContainer,Notebook,Script,Application,Website,WebApplication,WebService,SPARQLEndpoint,MobileApp,CLITool" | split: "," | sort %} +{% for type in software_data %} +{% if type != "" %} +{% assign numberOf = site.documents | where_exp: 'item', "item.type == type" | size %} +{% if numberOf > 0 %} +### {{ type }} + +There are {{numberOf}} components of type {{type}}: + {% assign components = site.documents | where: 'type',type %} + {% include reeco-components-table.html components=components %} +{% endif %} +{% endif %} +{% endfor %} \ No newline at end of file diff --git a/content/_spice-h2020/ecosystem/pages/work-packages/index.md b/content/_spice-h2020/ecosystem/pages/work-packages/index.md new file mode 100644 index 00000000..6dd07e6a --- /dev/null +++ b/content/_spice-h2020/ecosystem/pages/work-packages/index.md @@ -0,0 +1,22 @@ +--- +layout: default +title: Work packages +has_children: true +has_toc: false +nav_order: 5 +--- + +# {{ page.title }} + +The Polifonia Ecosystem, from the perspective of the project work packages. + + +{% assign children_list = site.pages | where: "parent", page.title %} +{% for child in children_list %} +
+

{{ child.long-title }}

+{{ child.description }} +
+{% endfor %} + + diff --git a/content/_spice-h2020/ecosystem/pages/work-packages/wp1.md b/content/_spice-h2020/ecosystem/pages/work-packages/wp1.md new file mode 100644 index 00000000..474ecaef --- /dev/null +++ b/content/_spice-h2020/ecosystem/pages/work-packages/wp1.md @@ -0,0 +1,31 @@ +--- +title: "#1 Pilots and Web portal" +long-title: "#1 Socio-technical roadmap, pilots, and Web portal" +work-package: "WP1" +parent: "Work packages" +layout: wp +container-id: WP1 +type: WorkPackage +name: "Work Package #1: Pilots and Web portal" +description: "WP1 coordinates Pilots development and delivers a registry of all resources and materials retrieved, used and produced in the project, in the form of a unified Web portal." +pilot: +- TUNES +- BELLS +- INTERLINK +- MUSICBO +- TONALITIES +- MEETUPS +- CHILD +- ORGANS +- ACCESS +- FACETS +project: polifonia-project +funder: + - name: Horizon 2020 Framework Programme + url: https://cordis.europa.eu/programme/id/H2020-EC + grant-agreement: "https://cordis.europa.eu/project/id/101004746" +credits: "This project has received funding from the European Union’s Horizon 2020 research and innovation programme under grant agreement N. 101004746." +has-part: [] +--- + +WP1 coordinates Pilots development and delivers a registry of all resources and materials retrieved, used and produced in the project, in the form of a unified Web portal. Its goal is twofold: 1) to demonstrate that the methods and tools developed in the “technology provider” work packages (WP2-5) are effective in facilitating management of large musical heritage collections and supporting enhanced understanding, preservation of, and interaction with, musical heritage, 2) to contribute to push the state of the art in relevant, though specific, musical heritage use cases. 
diff --git a/content/_spice-h2020/ecosystem/pages/work-packages/wp2.md b/content/_spice-h2020/ecosystem/pages/work-packages/wp2.md new file mode 100644 index 00000000..14dd6487 --- /dev/null +++ b/content/_spice-h2020/ecosystem/pages/work-packages/wp2.md @@ -0,0 +1,23 @@ +--- +container-id: WP2 +type: WorkPackage +name: "Work Package #2: Musical Heritage Knowledge Graphs" +description: "WP2 develops ontology-based knowledge graphs for representing music collections and its historical, cultural, and social context." +pilot: [] +project: polifonia-project +funder: + - name: Horizon 2020 Framework Programme + url: https://cordis.europa.eu/programme/id/H2020-EC + grant-agreement: "https://cordis.europa.eu/project/id/101004746" +credits: "This project has received funding from the European Union’s Horizon 2020 research and innovation programme under grant agreement N. 101004746." +has-part: [] +# Do not remove the below. +title: "#2 Knowledge Graphs" +long-title: "#2 Musical Heritage Knowledge Graphs" +work-package: "WP2" +parent: "Work packages" +layout: wp +--- + +WP2 develops ontology-based knowledge graphs for representing music collections (symbolic notation/scores, code-based musical assets, annotated audio/transcriptions, metadata) and its historical, cultural, and social context. +It is also in charge of supporting the formalisation of patterns analysed in WP3 and text relations extracted in WP4. It relies on Web standards for data and knowledge representation (RDF, OWL). diff --git a/content/_spice-h2020/ecosystem/pages/work-packages/wp3.md b/content/_spice-h2020/ecosystem/pages/work-packages/wp3.md new file mode 100644 index 00000000..93e635fd --- /dev/null +++ b/content/_spice-h2020/ecosystem/pages/work-packages/wp3.md @@ -0,0 +1,22 @@ +--- +container-id: WP3 +type: WorkPackage +name: "Work Package #3: Mining Musical Patterns" +description: "WP3 develops approaches to analysing large repositories of music (tunes, songs, etc.) 
to identify common, meaningful patterns." +pilot: [] +project: polifonia-project +funder: + - name: Horizon 2020 Framework Programme + url: https://cordis.europa.eu/programme/id/H2020-EC + grant-agreement: "https://cordis.europa.eu/project/id/101004746" +credits: "This project has received funding from the European Union’s Horizon 2020 research and innovation programme under grant agreement N. 101004746." +has-part: [] +# Do not remove the below. +title: "#3 Patterns" +long-title: "#3 Mining Musical Patterns" +work-package: "WP3" +parent: "Work packages" +layout: wp +--- + +Relying on the results of WP2, the overall goal of WP3 is to devise approaches to analysing large repositories of music (tunes, songs, etc.) to identify common, meaningful patterns that are indicative of their identity, filiation or cultural association (genres, origin, etc). diff --git a/content/_spice-h2020/ecosystem/pages/work-packages/wp4.md b/content/_spice-h2020/ecosystem/pages/work-packages/wp4.md new file mode 100644 index 00000000..535b1ff0 --- /dev/null +++ b/content/_spice-h2020/ecosystem/pages/work-packages/wp4.md @@ -0,0 +1,22 @@ +--- +container-id: WP4 +type: WorkPackage +name: "Work Package #4: Musical Heritage Knowledge Extraction from text" +description: "WP4 develops methods and tools for 1) creating plurilingual corpora focused on musical heritage, 2) extracting (automatically) relevant knowledge from texts" +pilot: [] +project: polifonia-project +funder: + - name: Horizon 2020 Framework Programme + url: https://cordis.europa.eu/programme/id/H2020-EC + grant-agreement: "https://cordis.europa.eu/project/id/101004746" +credits: "This project has received funding from the European Union’s Horizon 2020 research and innovation programme under grant agreement N. 101004746." +has-part: [] +# Do not remove the below. 
+title: "#4 Extraction from Text" +long-title: "#4 Musical Heritage Knowledge Extraction from text" +work-package: "WP4" +parent: "Work packages" +layout: wp +--- + +WP4 develops methods and tools for 1) creating plurilingual corpora focused on musical heritage, 2) extracting (automatically) relevant knowledge from text such as people, places, time, music works, experiences, stories, events, etc. about musical heritage, 3) evaluating knowledge extraction tools against the manually curated corpora. The developed methodologies and tools will enable the processing of multilingual textual sources, which express diverse perspectives at different times: linguistic, historical, cultural, political, etc. diff --git a/content/_spice-h2020/ecosystem/pages/work-packages/wp5.md b/content/_spice-h2020/ecosystem/pages/work-packages/wp5.md new file mode 100644 index 00000000..649a4d83 --- /dev/null +++ b/content/_spice-h2020/ecosystem/pages/work-packages/wp5.md @@ -0,0 +1,22 @@ +--- +container-id: WP5 +type: WorkPackage +name: "Work Package #5: Human Interaction with Musical Heritage" +description: "WP5 researches and develops highly interactive user interfaces to allow scholars, musicians to access, engage with, explore, and reuse musical heritage." +pilot: [] +project: polifonia-project +funder: + - name: Horizon 2020 Framework Programme + url: https://cordis.europa.eu/programme/id/H2020-EC + grant-agreement: "https://cordis.europa.eu/project/id/101004746" +credits: "This project has received funding from the European Union’s Horizon 2020 research and innovation programme under grant agreement N. 101004746." +has-part: [] +# Do not remove the below. 
+title: "#5 Interaction" +long-title: "#5 Human Interaction with Musical Heritage" +work-package: "WP5" +parent: "Work packages" +layout: wp +--- + +The objective of WP5 is to research and develop highly interactive inter-operable user interface tools to allow scholars, musicians and other consumers of musical content and knowledge to access, engage with, explore and reuse the resources unlocked by WPs 2,3, and 4. diff --git a/content/_spice-h2020/ecosystem/pages/work-packages/wp6.md b/content/_spice-h2020/ecosystem/pages/work-packages/wp6.md new file mode 100644 index 00000000..dcaf2d02 --- /dev/null +++ b/content/_spice-h2020/ecosystem/pages/work-packages/wp6.md @@ -0,0 +1,22 @@ +--- +container-id: WP6 +type: WorkPackage +name: "Work Package #6: Dissemination and exploitation" +description: "WP6 disseminates the project results; maximizes their impact, reach and re-use; and concretely engage stakeholders from relevant sectors." +pilot: [] +project: polifonia-project +funder: + - name: Horizon 2020 Framework Programme + url: https://cordis.europa.eu/programme/id/H2020-EC + grant-agreement: "https://cordis.europa.eu/project/id/101004746" +credits: "This project has received funding from the European Union’s Horizon 2020 research and innovation programme under grant agreement N. 101004746." +has-part: [] +# Do not remove the below. +title: "#6 Dissemination" +long-title: "#6 Dissemination and exploitation" +work-package: "WP6" +parent: "Work packages" +layout: wp +--- + +The main objectives of WP6 are to: disseminate the project results; maximize their impact, reach and re-use; and concretely engage stakeholders from relevant sectors for community engagement. 
diff --git a/content/_spice-h2020/ecosystem/pages/work-packages/wp7.md b/content/_spice-h2020/ecosystem/pages/work-packages/wp7.md new file mode 100644 index 00000000..2f0dcd5f --- /dev/null +++ b/content/_spice-h2020/ecosystem/pages/work-packages/wp7.md @@ -0,0 +1,23 @@ +--- +container-id: WP7 +type: WorkPackage +name: "Work Package #7: Project Coordination and Management" +description: "WP7 is dedicated to project coordination and management activities." +pilot: [] +project: polifonia-project +funder: + - name: Horizon 2020 Framework Programme + url: https://cordis.europa.eu/programme/id/H2020-EC + grant-agreement: "https://cordis.europa.eu/project/id/101004746" +credits: "This project has received funding from the European Union’s Horizon 2020 research and innovation programme under grant agreement N. 101004746." +has-part: [] +# Do not remove the below. +title: "#7 Coordination" +long-title: "#7 Project Coordination and Management" +work-package: "WP7" +parent: "Work packages" +layout: wp +--- + +The overall management plan designed by Polifonia is aimed at achieving seven main objectives, namely: 1) to coordinate and supervise research activities according to the work plan; 2) to ensure the overall legal management of the project; 3) to establish all management structures, committees and working groups; 4) to establish effective internal communication procedures; 5) to carry out the overall administrative and financial management of the project; 6) to anticipate and manage project risks using risk management strategies where possible; 7) to manage the new knowledge generated by the project. 
+ diff --git a/content/_spice-h2020/ecosystem/pages/work-packages/wp8.md b/content/_spice-h2020/ecosystem/pages/work-packages/wp8.md new file mode 100644 index 00000000..7e696dd6 --- /dev/null +++ b/content/_spice-h2020/ecosystem/pages/work-packages/wp8.md @@ -0,0 +1,23 @@ +--- +container-id: WP8 +type: WorkPackage +name: "Work Package #8: Ethics" +description: "WP8 ensures that the ecosystem tools improve the accessibility and availability of musical heritage knowledge on the web in a FAIR, transparent, and ethical way." +pilot: [] +project: polifonia-project +funder: + - name: Horizon 2020 Framework Programme + url: https://cordis.europa.eu/programme/id/H2020-EC + grant-agreement: "https://cordis.europa.eu/project/id/101004746" +credits: "This project has received funding from the European Union’s Horizon 2020 research and innovation programme under grant agreement N. 101004746." +has-part: [] +# Do not remove the below. +title: "#8 Ethics" +long-title: "#8 Ethics" +work-package: "WP8" +parent: "Work packages" +layout: wp +--- + +WP8 is dedicated to providing an ethical viewpoint to the research and development activities of the project and ensuring that all pilot applications as well as the ecosystem tools have a common general goal: to improve the accessibility and availability of musical heritage knowledge on the web, in a FAIR, transparent, and ethical way. +Specifically, it establishes an ethical protocol for ensuring that all Polifonia tools and applications comply with the ethical principles for trustworthy AI.