Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
22 commits
Select commit Hold shift + click to select a range
9ba16cd
add DTS importer path and selector
briehl Dec 18, 2024
012e5ac
add gff_genome to bulk importers
briehl Dec 18, 2024
320dc0e
add staging upload config
briehl Feb 3, 2025
01e4a1b
Merge branch 'develop' into URO-352-dts-import
briehl Mar 26, 2025
b958129
do dts file path tweaking on the front end
briehl Mar 28, 2025
beecb8e
Merge branch 'develop' into URO-352-dts-import
briehl Mar 28, 2025
a386eed
Add focus on open. Tweak display icon.
exnehilo7 Jul 2, 2025
5935fba
Merge pull request #3697 from kbase/URO-366-users-should-be-able-to-p…
briehl Jul 22, 2025
d593c24
fix a couple tests
briehl Jul 31, 2025
c1e7d28
add tests for dts_manifest importer
briehl Aug 1, 2025
5833adc
Merge branch 'develop' into URO-352-dts-import
briehl Aug 1, 2025
36551e5
fdescribe -> describe
briehl Aug 1, 2025
e91b7b8
Bump the pip group across 1 directory with 25 updates
dependabot[bot] Aug 1, 2025
844c5b8
Merge pull request #3677 from kbase/URO-352-dts-import
briehl Aug 13, 2025
8d3721c
version bump to 5.5.0
briehl Aug 13, 2025
2ae69c4
Merge pull request #3701 from kbase/version-bump
briehl Aug 13, 2025
854e190
Merge branch 'develop' into update-python-deps-aug-2025
briehl Aug 13, 2025
ab977ba
update release notes
briehl Aug 13, 2025
4b83cbf
Merge pull request #3703 from kbase/update-python-deps-aug-2025
briehl Aug 13, 2025
73d279f
update js deps
briehl Aug 13, 2025
3962b1a
Merge pull request #3704 from kbase/update-js-deps-aug-2025
ialarmedalien Aug 13, 2025
fe5f7bc
Merge branch 'main' into develop
briehl Aug 14, 2025
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
73 changes: 72 additions & 1 deletion RELEASE_NOTES.md
Original file line number Diff line number Diff line change
Expand Up @@ -4,8 +4,79 @@ The Narrative Interface allows users to craft KBase Narratives using a combinati

This is built on the Jupyter Notebook v6.5.7 and IPython 8.28.x (more notes will follow).

## Version 5.5.0
- UIP-52 - fix integration tests
- PTV-1913 - force backup authentication cookie to reset on load
- URO-362 - add text for DOI requests
- URO-366 - users should be able to paste usernames into the share search field
- URO-352 - add support for DTS manifest.json importer files for bulk import
- adjust the readonly / writable display toggle icon

- Python dependencies updated to the following versions:
- coverage: 7.10.1
- pytest: 8.4.1
- pytest-cov: 6.2.1
- pytest-recording: 0.13.4
- ruff: 0.12.7
- beautifulsoup4: 4.13.4
- certifi: 2025.7.14
- cryptography: 45.0.5
- jsonschema: 4.25.0
- markdown: 3.8.2
- pillow: 11.3.0
- plotly: 6.2.0
- pycurl: 7.45.6
- pygments: 2.19.2
- pyopenssl: 25.1.0
- rsa: 4.9.1
- setuptools: 80.9.0
- sympy: 1.14.0
- ipywidgets: 8.1.7
- jinja2: 3.1.6
- pandas: 2.3.1
- pymongo: 4.13.2
- requests: 2.32.4
- statsmodels: 0.14.5
- tornado: 6.5.1

- Javascript dependencies updated to the following versions:
- dompurify: 3.2.6
- follow-redirects: 1.15.11
- plotly.js-dist-min: 3.1.0
- @babel/traverse: 7.28.0
- @eslint/eslintrc: 3.3.1
- @eslint/js: 9.33.0
- @wdio/browserstack-service: 9.19.1
- @wdio/cli: 9.19.1
- @wdio/local-runner: 9.19.1
- @wdio/mocha-framework: 9.19.1
- @wdio/spec-reporter: 9.19.1
- autoprefixer: 10.4.21
- axios: 1.11.0
- chromedriver: 139.0.0
- commander: 14.0.0
- cssnano: 7.1.0
- eslint: 9.33.0
- eslint-config-prettier: 10.1.8
- glob: 11.0.3
- globals: 16.3.0
- jquery-migrate: 3.5.2
- lint-staged: 16.1.5
- postcss: 8.5.6
- postcss-cli: 11.0.1
- prettier: 3.6.2
- puppeteer: 24.16.1
- sass: 1.90.0
- selenium-standalone: 10.0.2
- selenium-webdriver: 4.35.0
- stylelint: 16.23.1
- stylelint-config-recommended: 17.0.0
- stylelint-config-standard: 39.0.0
- terser: 5.43.1


## Version 5.4.3
- URO-363 - add note to the sharing panel to contact KBase about DOIs

- Python `requirements-general.txt` and `requirements.txt` merged into a single file so that all runtime deps are installed in one place.

Expand Down
2 changes: 1 addition & 1 deletion kbase-extension/kbase_templates/narrative_header.html
Original file line number Diff line number Diff line change
Expand Up @@ -27,7 +27,7 @@
</div>
<div>Created by: <span id="kb-narr-creator"></span>
<span id="kb-view-mode">
<span class="fa fa-pencil"></span>
<span class="fa fa-eye-slash"></span>
</span>
</div>
</div>
Expand Down
6 changes: 6 additions & 0 deletions kbase-extension/static/kbase/config/staging_upload.json
Original file line number Diff line number Diff line change
Expand Up @@ -52,6 +52,10 @@
"id": "import_specification",
"name": "Import Specification"
},
{
"id": "dts_manifest",
"name": "Data Transfer Service Manifest"
},
{
"id": "decompress",
"name": "Decompress/Unpack"
Expand All @@ -62,13 +66,15 @@
"fastq_reads_interleaved",
"fastq_reads_noninterleaved",
"gff_metagenome",
"gff_genome",
"assembly",
"genbank_genome"
],
"app_info": {
"web_upload": {
"app_id": "kb_uploadmethods/upload_web_file"
},
"dts_manifest": { },
"import_specification": { },
"test_fastq_reads": {
"app_id": "NarrativeTest/example_reads_upload",
Expand Down
5 changes: 4 additions & 1 deletion kbase-extension/static/kbase/js/api/StagingServiceClient.js
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,10 @@ define(['RestAPIClient'], (RestAPIClient) => {
rename: { method: 'post', path: 'rename/${path}' },
decompress: { method: 'patch', path: 'decompress/${path}' },
importer_mappings: { method: 'get', path: 'importer_mappings/?${file_list}' },
bulkSpecification: { method: 'get', path: 'bulk_specification/?files=${files}' },
bulkSpecification: {
method: 'get',
path: 'bulk_specification/?files=${files}&${flag}',
},
write_bulk_specification: { method: 'post', path: 'write_bulk_specification/' },
},
});
Expand Down
6 changes: 6 additions & 0 deletions kbase-extension/static/kbase/js/kbaseNarrative.js
Original file line number Diff line number Diff line change
Expand Up @@ -360,6 +360,12 @@ define([
shareWidget.refresh();
}
shareDialog.show();

// After a few seconds, focus the share field.
setTimeout(() => {
$('.select2-search__field').focus();
}, 2000);

});
};

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -358,7 +358,7 @@ define([
// mechanism should be disabled (and the button hidden as well.)
const icon = $('#kb-view-mode span');
icon.toggleClass('fa-eye', this.uiMode === 'view');
icon.toggleClass('fa-pencil', this.uiMode === 'edit');
icon.toggleClass('fa-eye-slash', this.uiMode === 'edit');
Jupyter.narrative.readonly = this.uiMode === 'view';

// Warning, do not look for the code for this ... it will burn your
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@ define([

/**
* This makes a call to the Staging Service to fetch information from bulk specification files.
* This then gets processed through `processSpreadsheetFileData` before being returned.
* This then gets processed through `processBulkImportSpecData` before being returned.
* @param {Array[string]} files - array of file path names to treat as import specifications
* @returns Promise that resolves into information that can be used to open a bulk import cell.
* This has the format:
Expand All @@ -32,45 +32,18 @@ define([
* }
* }
* @throws Errors.ImportSetupError if an error occurs in either data fetching from the Staging
* Service, or in the initial parsing done by `processSpreadsheetFileData`
* Service, or in the initial parsing done by `processBulkImportSpecData`
*/
function getSpreadsheetFileInfo(files) {
if (!files || files.length === 0) {
return Promise.resolve({});
}
const stagingUrl = Config.url('staging_api_url');
const stagingServiceClient = new StagingServiceClient({
root: stagingUrl,
token: Runtime.make().authToken(),
});

// This is overkill, but a little future proofing. We have to make a GET call with an
// undetermined number of files, so if there are more than we can allow in the URL, gotta break that
// into multiple calls.
function getBulkImportFileInfo(xsvFiles, dtsFiles) {
// dtsFiles gets wiped out by the spec requester.
const dtsFileList = [...dtsFiles];

// a little cheating here to figure out the length allowance. Maybe it should be in the client?
const maxQueryLength = 2048 - stagingUrl.length - '/bulk_specification/?files='.length;
const bulkSpecProms = [];

while (files.length) {
const fileBatch = [];
let remainingLength = maxQueryLength;
while (
files.length &&
remainingLength - files[0].length - 1 >= 0 // -1 is for the comma
) {
const nextFile = files.shift();
fileBatch.push(nextFile);
remainingLength -= nextFile.length + 1;
}
bulkSpecProms.push(
stagingServiceClient.bulkSpecification({
files: encodeURIComponent(fileBatch.join(',')),
})
);
}
// noting here that these are effectively jQuery promises, so we can't just
// make this an async/await function, but need to wrap with Promise.all.
const xsvFileProms = requestBulkImportSpec(xsvFiles);
const dtsFileProms = requestBulkImportSpec(dtsFiles, 'dts');

return Promise.all(bulkSpecProms)
return Promise.all([...xsvFileProms, ...dtsFileProms])
.then((result) => {
// join results of all calls together
const errors = [];
Expand All @@ -92,6 +65,13 @@ define([
} else {
allCalls.types[dataType] = callResult.types[dataType];
allCalls.files[dataType] = callResult.files[dataType];
// These files have the username in there. We don't want that.
// So need to compare after stripping them out.
const fileName = allCalls.files[dataType].file;
const pathIdx = fileName.indexOf('/');
const strippedFile =
pathIdx !== -1 ? fileName.substring(pathIdx + 1) : fileName;
allCalls.files[dataType].isDts = dtsFileList.includes(strippedFile);
}
});
return allCalls;
Expand Down Expand Up @@ -123,10 +103,54 @@ define([
);
})
.then((result) => {
return processSpreadsheetFileData(result);
return processBulkImportSpecData(result);
});
}

function requestBulkImportSpec(files, flag = '') {
    /**
     * Requests bulk specification parsing from the Staging Service for a list of files,
     * batching the file names into as many GET calls as needed to keep each URL under
     * the maximum query length.
     *
     * This returns an array of jQuery Promises, which is what the staging service
     * client uses, so it can't be (easily) cast into async/await.
     *
     * @param {Array[string]} files - array of staged file paths to fetch bulk specs for.
     *     The caller's array is left unmodified.
     * @param {string} flag - optional endpoint flag (e.g. 'dts'), forwarded to the
     *     client's bulkSpecification call and counted toward the URL length budget.
     * @returns {Array} array of jQuery Promises, one per batched request; empty array
     *     if no files were given.
     */
    if (!files || files.length === 0) {
        return [];
    }
    // Work on a copy so the caller's array isn't consumed by shift() below.
    const pending = [...files];
    const stagingUrl = Config.url('staging_api_url');
    const stagingServiceClient = new StagingServiceClient({
        root: stagingUrl,
        token: Runtime.make().authToken(),
    });
    // This is overkill, but a little future proofing. We have to make a GET call with an
    // undetermined number of files, so if there are more than we can allow in the URL, gotta break that
    // into multiple calls.

    // a little cheating here to figure out the length allowance. Maybe it should be in the client?
    const path = '/bulk_specification/?' + flag + 'files=&';
    const maxQueryLength = 2048 - stagingUrl.length - path.length;
    const bulkSpecProms = [];

    while (pending.length) {
        const fileBatch = [];
        let remainingLength = maxQueryLength;
        while (
            pending.length &&
            remainingLength - pending[0].length - 1 >= 0 // -1 is for the comma
        ) {
            const nextFile = pending.shift();
            fileBatch.push(nextFile);
            remainingLength -= nextFile.length + 1;
        }
        if (fileBatch.length === 0) {
            // A single file name longer than the whole query allowance would never be
            // admitted to a batch, which previously spun forever. Send it alone and let
            // the service reject the oversized request instead.
            fileBatch.push(pending.shift());
        }
        bulkSpecProms.push(
            stagingServiceClient.bulkSpecification({
                files: encodeURIComponent(fileBatch.join(',')),
                flag,
            })
        );
    }
    return bulkSpecProms;
}

/**
* This function does some preprocessing on the spreadsheet file data. Specifically,
* those parameters that are static dropdowns or checkboxes need to translate their input
Expand All @@ -153,10 +177,14 @@ define([
* TODO: also return the fetched app specs to avoid fetching them twice?
* @param {Object} data
*/
async function processSpreadsheetFileData(data) {
async function processBulkImportSpecData(data) {
// map from given datatype to app id.
// if any data types are missing, record that
// if any data types are not bulk import ready, record that, too.

if (Object.keys(data.types).length === 0 && Object.keys(data.files).length === 0) {
return data;
}
const appIdToType = {};
const dataTypeErrors = [];
Object.keys(data.types).forEach((dataType) => {
Expand Down Expand Up @@ -253,19 +281,58 @@ define([
{}
);

/*
* Map from datatype to all file parameters. These are found (as in the bulk import cell)
* by looking for those params that are dynamic dropdowns that look at ftp_staging.
*/
const typeToFileParams = Object.entries(appIdToType).reduce(
(_typeToFileParams, [appId, dataType]) => {
const spec = appIdToSpec[appId].appSpec;
const specParams = spec.parameters.filter((param) => {
return (
param.dynamic_dropdown_options &&
param.dynamic_dropdown_options.data_source === 'ftp_staging'
);
});
_typeToFileParams[dataType] = specParams.map((param) => param.id);
return _typeToFileParams;
},
{}
);

/*
* Now, update all parameters in place.
* For each set of parameters in each type, look at the translated spec parameters.
* If any of those are in the given parameter set, do the translation.
*
* If the datatype comes from a DTS manifest file, adjust the file paths to always be
* relative to the subdirectory of that file.
*/
Object.values(appIdToType).forEach((dataType) => {
const specParams = typeToAlteredParams[dataType];
const fileParams = typeToFileParams[dataType];
let filePrefix = '';
if (data.files[dataType].isDts) {
const file = data.files[dataType].file;
const parts = file.split('/');
if (parts.length > 2) {
filePrefix = parts.slice(1, -1).join('/') + '/';
}
}

data.types[dataType] = data.types[dataType].map((parameterSet) => {
Object.keys(parameterSet).forEach((paramId) => {
const value = parameterSet[paramId];
if (specParams[paramId] && value in specParams[paramId]) {
parameterSet[paramId] = specParams[paramId][value];
}
if (
data.files[dataType].isDts &&
fileParams.includes(paramId) &&
parameterSet[paramId] !== null
) {
parameterSet[paramId] = filePrefix + parameterSet[paramId];
}
});
return parameterSet;
});
Expand Down Expand Up @@ -390,6 +457,7 @@ define([
const bulkFiles = {};
const singleFiles = [];
const xsvFiles = [];
const dtsFiles = [];
fileInfo.forEach((file) => {
const importType = file.type;
if (bulkIds.has(importType)) {
Expand All @@ -404,11 +472,13 @@ define([
bulkFiles[importType].files.push(file.name);
} else if (importType === 'import_specification') {
xsvFiles.push(file.name);
} else if (importType === 'dts_manifest') {
dtsFiles.push(file.name);
} else {
singleFiles.push(file);
}
});
return getSpreadsheetFileInfo(xsvFiles)
return getBulkImportFileInfo(xsvFiles, dtsFiles)
.then((result) => {
if (result.types) {
Object.keys(result.types).forEach((dataType) => {
Expand Down
Loading
Loading