@@ -14,7 +14,7 @@ define([
    /**
     * This makes a call to the Staging Service to fetch information from bulk specification files.
-     * This then gets processed through `processSpreadsheetFileData` before being returned.
+     * This then gets processed through `processBulkImportSpecData` before being returned.
     * @param {Array[string]} files - array of file path names to treat as import specifications
     * @returns Promise that resolves into information that can be used to open a bulk import cell.
     * This has the format:
@@ -32,45 +32,18 @@ define([
     *     }
     * }
     * @throws Errors.ImportSetupError if an error occurs in either data fetching from the Staging
-     * Service, or in the initial parsing done by `processSpreadsheetFileData`
+     * Service, or in the initial parsing done by `processBulkImportSpecData`
     */
-    function getSpreadsheetFileInfo(files) {
-        if (!files || files.length === 0) {
-            return Promise.resolve({});
-        }
-        const stagingUrl = Config.url('staging_api_url');
-        const stagingServiceClient = new StagingServiceClient({
-            root: stagingUrl,
-            token: Runtime.make().authToken(),
-        });
-
-        // This is overkill, but a little future proofing. We have to make a GET call with an
-        // undetermined number of files, so if there are more than we can allow in the URL,
-        // gotta break that into multiple calls.
+    function getBulkImportFileInfo(xsvFiles, dtsFiles) {
+        // dtsFiles gets wiped out by the spec requester, so keep a copy for later comparison.
+        const dtsFileList = [...dtsFiles];

-        // a little cheating here to figure out the length allowance. Maybe it should be in the client?
-        const maxQueryLength = 2048 - stagingUrl.length - '/bulk_specification/?files='.length;
-        const bulkSpecProms = [];
-
-        while (files.length) {
-            const fileBatch = [];
-            let remainingLength = maxQueryLength;
-            while (
-                files.length &&
-                remainingLength - files[0].length - 1 >= 0 // -1 is for the comma
-            ) {
-                const nextFile = files.shift();
-                fileBatch.push(nextFile);
-                remainingLength -= nextFile.length + 1;
-            }
-            bulkSpecProms.push(
-                stagingServiceClient.bulkSpecification({
-                    files: encodeURIComponent(fileBatch.join(',')),
-                })
-            );
-        }
+        // Note that these are effectively jQuery promises, so we can't just make this an
+        // async/await function; we need to wrap them with Promise.all instead.
+        const xsvFileProms = requestBulkImportSpec(xsvFiles);
+        const dtsFileProms = requestBulkImportSpec(dtsFiles, 'dts');

-        return Promise.all(bulkSpecProms)
+        return Promise.all([...xsvFileProms, ...dtsFileProms])
            .then((result) => {
                // join results of all calls together
                const errors = [];
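
For reviewers, a minimal usage sketch of the new entry point. The file names are hypothetical, and the resolved shape is inferred from the merging code in the next hunk:

```javascript
// xsv specs and DTS manifests are requested separately, then merged into a
// single { types, files } structure by the Promise.all handler.
getBulkImportFileInfo(
    ['bulk_import.csv'], // hypothetical xsv spec file
    ['subdir/dts_manifest.json'] // hypothetical DTS manifest file
).then((allCalls) => {
    // allCalls.types[dataType] -> parameter sets parsed from the spec files
    // allCalls.files[dataType] -> source file info, including the new isDts flag
});
```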
@@ -92,6 +65,13 @@ define([
                    } else {
                        allCalls.types[dataType] = callResult.types[dataType];
                        allCalls.files[dataType] = callResult.files[dataType];
+                        // These file paths include the username, which we don't want,
+                        // so strip it out before comparing.
+                        const fileName = allCalls.files[dataType].file;
+                        const pathIdx = fileName.indexOf('/');
+                        const strippedFile =
+                            pathIdx !== -1 ? fileName.substring(pathIdx + 1) : fileName;
+                        allCalls.files[dataType].isDts = dtsFileList.includes(strippedFile);
                    }
                });
                return allCalls;
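
A worked example of the username-stripping comparison added above; the path is hypothetical:

```javascript
// Staging Service paths come back as '<username>/<path>', while dtsFileList
// holds plain '<path>' entries, so drop everything up to the first '/'.
const fileName = 'someuser/subdir/dts_manifest.json'; // hypothetical path
const pathIdx = fileName.indexOf('/');
const strippedFile = pathIdx !== -1 ? fileName.substring(pathIdx + 1) : fileName;
// strippedFile === 'subdir/dts_manifest.json', which matches a dtsFileList entry
```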
@@ -123,10 +103,54 @@ define([
                );
            })
            .then((result) => {
-                return processSpreadsheetFileData(result);
+                return processBulkImportSpecData(result);
            });
    }

+    function requestBulkImportSpec(files, flag = '') {
+        /**
+         * This returns an array of jQuery Promises, which is what
+         * the staging service client uses, so it can't be (easily)
+         * cast into async/await.
+         */
+        if (!files || files.length === 0) {
+            return [];
+        }
+        const stagingUrl = Config.url('staging_api_url');
+        const stagingServiceClient = new StagingServiceClient({
+            root: stagingUrl,
+            token: Runtime.make().authToken(),
+        });
+        // This is overkill, but a little future proofing. We have to make a GET call with an
+        // undetermined number of files, so if there are more than we can allow in the URL,
+        // gotta break that into multiple calls.
+
+        // a little cheating here to figure out the length allowance. Maybe it should be in the client?
+        const path = '/bulk_specification/?' + flag + 'files=&';
+        const maxQueryLength = 2048 - stagingUrl.length - path.length;
+        const bulkSpecProms = [];
+
+        while (files.length) {
+            const fileBatch = [];
+            let remainingLength = maxQueryLength;
+            while (
+                files.length &&
+                remainingLength - files[0].length - 1 >= 0 // -1 is for the comma
+            ) {
+                const nextFile = files.shift();
+                fileBatch.push(nextFile);
+                remainingLength -= nextFile.length + 1;
+            }
+            bulkSpecProms.push(
+                stagingServiceClient.bulkSpecification({
+                    files: encodeURIComponent(fileBatch.join(',')),
+                    flag,
+                })
+            );
+        }
+        return bulkSpecProms;
+    }
+
    /**
     * This function does some preprocessing on the spreadsheet file data. Specifically,
     * those parameters that are static dropdowns or checkboxes need to translate their input
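
A sketch of the URL-length budget that `requestBulkImportSpec` (in the hunk above) uses to batch files; the root URL is hypothetical:

```javascript
// The query budget is whatever remains of 2048 characters after the service
// root and the '/bulk_specification/?<flag>files=&' path are subtracted.
const stagingUrl = 'https://kbase.example/services/staging_service'; // hypothetical
const flag = 'dts';
const path = '/bulk_specification/?' + flag + 'files=&';
const maxQueryLength = 2048 - stagingUrl.length - path.length;
// Files are then packed greedily into comma-separated batches that fit the
// budget, one bulkSpecification() call per batch.
```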
@@ -153,10 +177,14 @@ define([
     * TODO: also return the fetched app specs to avoid fetching them twice?
     * @param {Object} data
     */
-    async function processSpreadsheetFileData(data) {
+    async function processBulkImportSpecData(data) {
        // map from given datatype to app id.
        // if any data types are missing, record that
        // if any data types are not bulk import ready, record that, too.
+
+        if (Object.keys(data.types).length === 0 && Object.keys(data.files).length === 0) {
+            return data;
+        }
        const appIdToType = {};
        const dataTypeErrors = [];
        Object.keys(data.types).forEach((dataType) => {
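
The early return added above short-circuits the empty case; a sketch of the resulting behavior, assuming the guard as written:

```javascript
// With no spec files selected, both requesters return [], Promise.all([])
// resolves to [], and the merged result has empty types and files maps,
// so the function returns immediately without fetching any app specs.
processBulkImportSpecData({ types: {}, files: {} }).then((data) => {
    // data is returned unchanged: { types: {}, files: {} }
});
```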
@@ -253,19 +281,58 @@ define([
            {}
        );

+        /*
+         * Map from datatype to all file parameters. These are found (as in the bulk import cell)
+         * by looking for those params that are dynamic dropdowns that look at ftp_staging.
+         */
+        const typeToFileParams = Object.entries(appIdToType).reduce(
+            (_typeToFileParams, [appId, dataType]) => {
+                const spec = appIdToSpec[appId].appSpec;
+                const specParams = spec.parameters.filter((param) => {
+                    return (
+                        param.dynamic_dropdown_options &&
+                        param.dynamic_dropdown_options.data_source === 'ftp_staging'
+                    );
+                });
+                _typeToFileParams[dataType] = specParams.map((param) => param.id);
+                return _typeToFileParams;
+            },
+            {}
+        );
+
        /*
         * Now, update all parameters in place.
         * For each set of parameters in each type, look at the translated spec parameters.
         * If any of those are in the given parameter set, do the translation.
+         *
+         * If the datatype comes from a DTS manifest file, adjust the file paths to always be
+         * relative to the subdirectory of that file.
         */
        Object.values(appIdToType).forEach((dataType) => {
            const specParams = typeToAlteredParams[dataType];
+            const fileParams = typeToFileParams[dataType];
+            let filePrefix = '';
+            if (data.files[dataType].isDts) {
+                const file = data.files[dataType].file;
+                const parts = file.split('/');
+                if (parts.length > 2) {
+                    filePrefix = parts.slice(1, -1).join('/') + '/';
+                }
+            }
+
            data.types[dataType] = data.types[dataType].map((parameterSet) => {
                Object.keys(parameterSet).forEach((paramId) => {
                    const value = parameterSet[paramId];
                    if (specParams[paramId] && value in specParams[paramId]) {
                        parameterSet[paramId] = specParams[paramId][value];
                    }
+                    if (
+                        data.files[dataType].isDts &&
+                        fileParams.includes(paramId) &&
+                        parameterSet[paramId] !== null
+                    ) {
+                        parameterSet[paramId] = filePrefix + parameterSet[paramId];
+                    }
                });
                return parameterSet;
            });
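
A worked example of the DTS path-prefix adjustment in this hunk; the file names are hypothetical:

```javascript
// For a manifest at '<username>/<subdirs...>/<manifest>', file parameters are
// prefixed with the manifest's subdirectory path (username stripped).
const file = 'someuser/project_a/dts_manifest.json'; // hypothetical
const parts = file.split('/'); // ['someuser', 'project_a', 'dts_manifest.json']
let filePrefix = '';
if (parts.length > 2) {
    filePrefix = parts.slice(1, -1).join('/') + '/'; // 'project_a/'
}
// A file parameter value of 'reads.fastq' then becomes 'project_a/reads.fastq'.
```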
@@ -390,6 +457,7 @@ define([
        const bulkFiles = {};
        const singleFiles = [];
        const xsvFiles = [];
+        const dtsFiles = [];
        fileInfo.forEach((file) => {
            const importType = file.type;
            if (bulkIds.has(importType)) {
@@ -404,11 +472,13 @@ define([
                bulkFiles[importType].files.push(file.name);
            } else if (importType === 'import_specification') {
                xsvFiles.push(file.name);
+            } else if (importType === 'dts_manifest') {
+                dtsFiles.push(file.name);
            } else {
                singleFiles.push(file);
            }
        });
-        return getSpreadsheetFileInfo(xsvFiles)
+        return getBulkImportFileInfo(xsvFiles, dtsFiles)
            .then((result) => {
                if (result.types) {
                    Object.keys(result.types).forEach((dataType) => {
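
For context, a self-contained sketch of the dispatch logic above, ahead of the `getBulkImportFileInfo` call; the entries are hypothetical:

```javascript
// 'import_specification' -> xsvFiles, 'dts_manifest' -> dtsFiles,
// bulk-ready types -> bulkFiles, anything else -> singleFiles.
const xsvFiles = [];
const dtsFiles = [];
const singleFiles = [];
[
    { name: 'bulk_import.csv', type: 'import_specification' }, // hypothetical
    { name: 'subdir/dts_manifest.json', type: 'dts_manifest' }, // hypothetical
    { name: 'notes.txt', type: 'unsupported' }, // hypothetical
].forEach((file) => {
    if (file.type === 'import_specification') {
        xsvFiles.push(file.name);
    } else if (file.type === 'dts_manifest') {
        dtsFiles.push(file.name);
    } else {
        singleFiles.push(file);
    }
});
// xsvFiles and dtsFiles then feed getBulkImportFileInfo(xsvFiles, dtsFiles).
```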