Dictionary import progress improvements (#1868)
* Update loop vars

* Update loop

* Improve progress reporting during the import process
toasted-nutbread authored Jul 31, 2021
1 parent 01c5c5c commit cd3f47a
Showing 2 changed files with 106 additions and 26 deletions.
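For context, this commit changes the shape of the progress callback: DictionaryImporter previously called onProgress(total, current), and after this change it passes a single data object describing the current step. A minimal sketch of a consumer, assuming only the object shape set up in _progressReset below (the mediaLoader value and the logging are illustrative, not part of the commit):

    // Hypothetical consumer of the new progress data object.
    const onProgress = ({stepIndex, stepCount, index, count}) => {
        const percent = count > 0 ? (index / count * 100) : 0;
        console.log(`Step ${stepIndex + 1} of ${stepCount}: ${percent.toFixed(0)}%`);
    };
    const importer = new DictionaryImporter(mediaLoader, onProgress); // mediaLoader as in the existing constructor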
85 changes: 67 additions & 18 deletions ext/js/language/dictionary-importer.js
@@ -24,7 +24,8 @@
class DictionaryImporter {
constructor(mediaLoader, onProgress) {
this._mediaLoader = mediaLoader;
this._onProgress = onProgress;
this._onProgress = typeof onProgress === 'function' ? onProgress : () => {};
this._progressData = null;
}

async importDictionary(dictionaryDatabase, archiveContent, details) {
@@ -35,7 +36,7 @@ class DictionaryImporter {
throw new Error('Database is not ready');
}

const hasOnProgress = (typeof this._onProgress === 'function');
this._progressReset();

// Read archive
const archive = await JSZip.loadAsync(archiveContent);
@@ -72,6 +73,7 @@ class DictionaryImporter {
const convertTagBankEntry = this._convertTagBankEntry.bind(this);

// Load schemas
this._progressNextStep(0);
const dataBankSchemaPaths = this._getDataBankSchemaPaths(version);
const dataBankSchemas = await Promise.all(dataBankSchemaPaths.map((path) => this._getSchema(path)));

@@ -83,6 +85,7 @@
const tagFiles = this._getArchiveFiles(archive, 'tag_bank_?.json');

// Load data
this._progressNextStep(termFiles.length + termMetaFiles.length + kanjiFiles.length + kanjiMetaFiles.length + tagFiles.length);
const termList = await this._readFileSequence(termFiles, convertTermBankEntry, dataBankSchemas[0], dictionaryTitle);
const termMetaList = await this._readFileSequence(termMetaFiles, convertTermMetaBankEntry, dataBankSchemas[1], dictionaryTitle);
const kanjiList = await this._readFileSequence(kanjiFiles, convertKanjiBankEntry, dataBankSchemas[2], dictionaryTitle);
@@ -100,17 +103,26 @@
}

// Extended data support
this._progressNextStep(termList.length);
const formatProgressInterval = 1000;
const requirements = [];
for (const entry of termList) {
for (let i = 0, ii = termList.length; i < ii; ++i) {
const entry = termList[i];
const glossaryList = entry.glossary;
for (let i = 0, ii = glossaryList.length; i < ii; ++i) {
const glossary = glossaryList[i];
for (let j = 0, jj = glossaryList.length; j < jj; ++j) {
const glossary = glossaryList[j];
if (typeof glossary !== 'object' || glossary === null) { continue; }
glossaryList[i] = this._formatDictionaryTermGlossaryObject(glossary, entry, requirements);
glossaryList[j] = this._formatDictionaryTermGlossaryObject(glossary, entry, requirements);
}
if ((i % formatProgressInterval) === 0) {
this._progressData.index = i;
this._progress();
}
}
this._progress();

// Async requirements
this._progressNextStep(requirements.length);
const {media} = await this._resolveAsyncRequirements(requirements, archive);

// Add dictionary
@@ -119,15 +131,8 @@
dictionaryDatabase.bulkAdd('dictionaries', [summary], 0, 1);

// Add data
this._progressNextStep(termList.length + termMetaList.length + kanjiList.length + kanjiMetaList.length + tagList.length);
const errors = [];
const total = (
termList.length +
termMetaList.length +
kanjiList.length +
kanjiMetaList.length +
tagList.length
);
let loadedCount = 0;
const maxTransactionLength = 1000;

const bulkAdd = async (objectStoreName, entries) => {
@@ -141,10 +146,8 @@
errors.push(e);
}

loadedCount += count;
if (hasOnProgress) {
this._onProgress(total, loadedCount);
}
this._progressData.index += count;
this._progress();
}
};

@@ -155,9 +158,32 @@
await bulkAdd('tagMeta', tagList);
await bulkAdd('media', media);

this._progress();

return {result: summary, errors};
}

_progressReset() {
this._progressData = {
stepIndex: 0,
stepCount: 6,
index: 0,
count: 0
};
this._progress();
}

_progressNextStep(count) {
++this._progressData.stepIndex;
this._progressData.index = 0;
this._progressData.count = count;
this._progress();
}

_progress() {
this._onProgress(this._progressData);
}

_createSummary(dictionaryTitle, version, index, details) {
const summary = {
title: dictionaryTitle,
@@ -328,6 +354,8 @@ class DictionaryImporter {
return;
}
Object.assign(target, result);
++this._progressData.index;
this._progress();
}

async _resolveDictionaryTermGlossaryImage(context, data, entry) {
@@ -500,10 +528,31 @@
}

async _readFileSequence(files, convertEntry, schema, dictionaryTitle) {
const progressData = this._progressData;
let count = 0;
let startIndex = 0;
if (typeof this._onProgress === 'function') {
schema.progressInterval = 1000;
schema.progress = (s) => {
const index = s.getValueStackLength() > 1 ? s.getValueStackItem(1).path : 0;
progressData.index = startIndex + (index / count);
this._progress();
};
}

const results = [];
for (const file of files) {
const entries = JSON.parse(await file.async('string'));

count = Array.isArray(entries) ? Math.max(entries.length, 1) : 1;
startIndex = progressData.index;
this._progress();

this._validateJsonSchema(entries, schema, file.name);

progressData.index = startIndex + 1;
this._progress();

for (const entry of entries) {
results.push(convertEntry(entry, dictionaryTitle));
}
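One subtlety in _readFileSequence above: during validation each bank file contributes one unit of progress to the step, and the schema's progress hook converts the validator's position within the file (the top-level array index) into a fractional sub-increment. A rough worked example of that arithmetic, with variable names mirroring the code and the concrete numbers chosen purely for illustration:

    // Worked example of the fractional progress arithmetic in _readFileSequence.
    const startIndex = 2;    // progressData.index when the file begins (two files already validated)
    const count = 1000;      // entries in the current file
    const index = 250;       // validator's current position within that file
    const fractional = startIndex + (index / count);  // 2.25 units of the step's file count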
47 changes: 39 additions & 8 deletions ext/js/pages/settings/dictionary-import-controller.js
@@ -140,13 +140,28 @@ class DictionaryImportController {
prefixWildcardsSupported: optionsFull.global.database.prefixWildcardsSupported
};

const onProgress = (total, current) => {
const percent = (current / total * 100.0);
let statusPrefix = '';
let stepIndex = -2;
const onProgress = (data) => {
const {stepIndex: stepIndex2, index, count} = data;
if (stepIndex !== stepIndex2) {
stepIndex = stepIndex2;
const labelText = `${statusPrefix} - Step ${stepIndex2 + 1} of ${data.stepCount}: ${this._getImportLabel(stepIndex2)}...`;
for (const label of infoLabels) { label.textContent = labelText; }
}

const percent = count > 0 ? (index / count * 100.0) : 0.0;
const cssString = `${percent}%`;
const statusString = `${percent.toFixed(0)}%`;
const statusString = `${Math.floor(percent).toFixed(0)}%`;
for (const progressBar of progressBars) { progressBar.style.width = cssString; }
for (const label of statusLabels) { label.textContent = statusString; }
this._triggerStorageChanged();

switch (stepIndex2) {
case -2: // Initialize
case 5: // Data import
this._triggerStorageChanged();
break;
}
};

const fileCount = files.length;
@@ -156,10 +171,13 @@
importInfo.textContent = `(${i + 1} of ${fileCount})`;
}

onProgress(1, 0);

const labelText = `Importing dictionary${fileCount > 1 ? ` (${i + 1} of ${fileCount})` : ''}...`;
for (const label of infoLabels) { label.textContent = labelText; }
statusPrefix = `Importing dictionary${fileCount > 1 ? ` (${i + 1} of ${fileCount})` : ''}`;
onProgress({
stepIndex: -1,
stepCount: 6,
index: 0,
count: 0
});
if (statusFooter !== null) { statusFooter.setTaskActive(progressSelector, true); }

await this._importDictionary(files[i], importDetails, onProgress);
@@ -180,6 +198,19 @@
}
}

_getImportLabel(stepIndex) {
switch (stepIndex) {
case -1:
case 0: return 'Loading dictionary';
case 1: return 'Loading schemas';
case 2: return 'Validating data';
case 3: return 'Processing data';
case 4: return 'Post-processing data';
case 5: return 'Importing data';
default: return '';
}
}

async _importDictionary(file, importDetails, onProgress) {
const dictionaryImporter = new DictionaryImporterThreaded(onProgress);
const archiveContent = await this._readFile(file);
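Putting the controller pieces together: the status line combines statusPrefix, a step label from _getImportLabel, and a floored percentage. A standalone illustration of the resulting text, assuming the formatting used in onProgress above (formatStatus is a hypothetical helper, not part of the commit):

    // Hypothetical helper mirroring the label and percent formatting in onProgress.
    function formatStatus(statusPrefix, data, getImportLabel) {
        const {stepIndex, stepCount, index, count} = data;
        const percent = count > 0 ? (index / count * 100.0) : 0.0;
        const label = `${statusPrefix} - Step ${stepIndex + 1} of ${stepCount}: ${getImportLabel(stepIndex)}...`;
        return `${label} ${Math.floor(percent)}%`;
    }

    // formatStatus('Importing dictionary (1 of 2)', {stepIndex: 5, stepCount: 6, index: 2500, count: 10000}, getImportLabel)
    //     -> 'Importing dictionary (1 of 2) - Step 6 of 6: Importing data... 25%'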
