Skip to content

Commit

Permalink
Merge pull request #205 from ncbo/develop
Browse files Browse the repository at this point in the history
Develop to multilingual branch merge
  • Loading branch information
alexskr authored May 21, 2024
2 parents 112e39b + 47fa900 commit c2beff6
Show file tree
Hide file tree
Showing 5 changed files with 24 additions and 42 deletions.
4 changes: 2 additions & 2 deletions lib/ontologies_linked_data/diff/bubastis_diff.rb
Original file line number Diff line number Diff line change
Expand Up @@ -37,11 +37,11 @@ class BubastisDiffCommand < DiffTool
# Loading one file locally and one from the web and outputting results to plain text:
# java -jar bubastis_1_2.jar -ontology1 "H://disease_ontology_version_1.owl" -ontology2 "http://www.disease.org/diseaseontology_latest.owl" -output "C://my_diff.txt"

def initialize(old_file_path, new_file_path)
def initialize(old_file_path, new_file_path, output_repo)
@bubastis_jar_path = LinkedData.bindir + "/bubastis.jar"
@input_fileOld = old_file_path
@input_fileNew = new_file_path
@output_repo = File.expand_path(@input_fileNew).gsub(File.basename(@input_fileNew),'')
@output_repo = output_repo
@file_diff_path = nil
@java_heap_size = LinkedData.settings.java_max_heap_size
end
Expand Down
15 changes: 12 additions & 3 deletions lib/ontologies_linked_data/models/ontology_submission.rb
Original file line number Diff line number Diff line change
Expand Up @@ -283,6 +283,10 @@ def zip_folder
File.join([data_folder, 'unzipped'])
end

# Directory that holds the submission's master ontology file:
# the unzip target when the upload was an archive, otherwise the
# plain data folder.
def master_file_folder
  if zipped?
    zip_folder
  else
    data_folder
  end
end

# Absolute path of the gzipped CSV export for this submission's
# ontology, located inside the submission's data folder.
def csv_path
  filename = ontology.acronym.to_s + ".csv.gz"
  File.join(data_folder, filename)
end
Expand Down Expand Up @@ -336,7 +340,6 @@ def unzip_submission(logger)
zip_dst
end


def class_count(logger=nil)
logger ||= LinkedData::Parser.logger || Logger.new($stderr)
count = -1
Expand Down Expand Up @@ -699,6 +702,14 @@ def parsable?(logger: Logger.new($stdout))
parsable
end

# Build an OWLAPI parser command for this submission.
#
# Ensures any zipped upload is unpacked first, then wires the parser
# input file, the submission's data folder as output directory, and
# the declared master file into the command object.
#
# @param logger [Logger] destination for parse progress (defaults to stdout)
# @return [LinkedData::Parser::OWLAPICommand] ready-to-run parser command
def owlapi_parser(logger: Logger.new($stdout))
  unzip_submission(logger)
  input_path = owlapi_parser_input
  output_dir = File.expand_path(self.data_folder.to_s)
  LinkedData::Parser::OWLAPICommand.new(input_path,
                                        output_dir,
                                        master_file: self.masterFileName,
                                        logger: logger)
end

private

Expand All @@ -711,8 +722,6 @@ def owlapi_parser_input
File.expand_path(path)
end



def check_http_file(url)
session = Net::HTTP.new(url.host, url.port)
session.use_ssl = true if url.port == 443
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -18,8 +18,9 @@ def init_diff_tool(older)
older.bring(:uploadFilePath)

LinkedData::Diff::BubastisDiffCommand.new(
File.expand_path(older.uploadFilePath),
File.expand_path(@submission.uploadFilePath))
File.expand_path(older.uploadFilePath.to_s),
File.expand_path(@submission.uploadFilePath.to_s),
File.expand_path(@submission.data_folder.to_s))
end

def process_diff(logger)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -307,11 +307,11 @@ def process_rdf(logger, reasoning)
raise e
end

MissingLabelsHandler.new(@submission).process(logger, file_path: @submission.master_file_path)
MissingLabelsHandler.new(@submission).process(logger, file_path: @submission.uploadFilePath.to_s)

status = LinkedData::Models::SubmissionStatus.find('OBSOLETE').first
begin
generate_obsolete_classes(logger, @submission.master_file_path)
generate_obsolete_classes(logger, @submission.uploadFilePath.to_s)
@submission.add_submission_status(status)
@submission.save
rescue Exception => e
Expand All @@ -329,7 +329,7 @@ def generate_rdf(logger, reasoning: true)

if @submission.hasOntologyLanguage.umls?
triples_file_path = @submission.triples_file_path
logger.info("Using UMLS turtle file found, skipping OWLAPI parse")
logger.info("UMLS turtle file found; doing OWLAPI parse to extract metrics")
logger.flush
mime_type = LinkedData::MediaTypes.media_type_from_base(LinkedData::MediaTypes::TURTLE)
SubmissionMetricsCalculator.new(@submission).generate_umls_metrics_file(triples_file_path)
Expand All @@ -348,10 +348,7 @@ def generate_rdf(logger, reasoning: true)
end

owlapi = @submission.owlapi_parser(logger: logger)

if !reasoning
owlapi.disable_reasoner
end
owlapi.disable_reasoner unless reasoning
triples_file_path, missing_imports = owlapi.parse

if missing_imports && missing_imports.length > 0
Expand All @@ -364,6 +361,9 @@ def generate_rdf(logger, reasoning: true)
@submission.missingImports = nil
end
logger.flush
# debug code when you need to avoid re-generating the owlapi.xrdf file,
# comment out the block above and uncomment the line below
# triples_file_path = output_rdf
end

begin
Expand All @@ -372,41 +372,15 @@ def generate_rdf(logger, reasoning: true)
logger.error("Error sending data to triple store - #{e.response.code} #{e.class}: #{e.response.body}") if e.response&.body
raise e
end
version_info = extract_version

if version_info
@submission.version = version_info
end
end



# Replace the submission's named graph in the triple store with the
# contents of +triples_file_path+.
#
# NOTE(review): despite the "appended" wording in the log message, this
# is a full replace — the existing graph is dropped before the new
# triples are uploaded. Statement order matters here.
#
# @param triples_file_path [String] path to the serialized triples to load
# @param logger [Logger] receives the confirmation message
# @param mime_type [String, nil] serialization format hint passed to the store
def delete_and_append(triples_file_path, logger, mime_type = nil)
Goo.sparql_data_client.delete_graph(@submission.id)
Goo.sparql_data_client.put_triples(@submission.id, triples_file_path, mime_type)
logger.info("Triples #{triples_file_path} appended in #{@submission.id.to_ntriples}")
logger.flush
end




# Look up the ontology's owl:versionInfo in the triple store.
#
# Queries the submission's own graph for the fixed version subject and
# returns the first versionInfo literal found as a String, or nil when
# the graph carries no version triple. The early `return` inside the
# block exits this method on the first solution.
def extract_version

query_version_info = <<eos
SELECT ?versionInfo
FROM #{@submission.id.to_ntriples}
WHERE {
<http://bioportal.bioontology.org/ontologies/versionSubject>
<http://www.w3.org/2002/07/owl#versionInfo> ?versionInfo .
}
eos
Goo.sparql_query_client.query(query_version_info).each_solution do |sol|
return sol[:versionInfo].to_s
end
return nil
end

def process_callbacks(logger, callbacks, action_name, &block)
callbacks.delete_if do |_, callback|
begin
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -36,9 +36,7 @@ def process_submission(logger, options = {})
if archive
@submission.archive
else

@submission.generate_rdf(logger, reasoning: reasoning) if process_rdf

parsed = @submission.ready?(status: [:rdf, :rdf_labels])

if index_search
Expand Down

0 comments on commit c2beff6

Please sign in to comment.