@@ -4,195 +4,163 @@ const gdal = require("gdal-async");
4
4
const fs = require ( "fs" ) ;
5
5
const path = require ( "path" ) ;
6
6
const extract = require ( "extract-zip" ) ;
7
- const cors = require ( 'cors' ) ;
8
- const rateLimit = require ( 'express-rate-limit' ) ;
7
+ const cors = require ( "cors" ) ;
8
+ const rateLimit = require ( "express-rate-limit" ) ;
9
+ const awsServerlessExpressMiddleware = require ( "aws-serverless-express/middleware" ) ;
9
10
10
11
// Express application configuration: proxy trust, security headers, Lambda
// integration, rate limiting, CORS, base64 body decoding, request logging,
// file-upload parsing and the temp upload directory.
const app = express();

// Running behind API Gateway / a load balancer: trust the first proxy hop so
// req.ip (and therefore the rate limiter) sees the real client address.
app.set("trust proxy", 1);

// Don't advertise the framework/version in responses.
app.disable("x-powered-by");

// Exposes the original API Gateway event/context on req.apiGateway.
app.use(awsServerlessExpressMiddleware.eventContext());

// Limit each IP to 100 requests per 15-minute window.
const limiter = rateLimit({
  windowMs: 15 * 60 * 1000, // 15 minutes
  max: 100,
});

app.use(limiter);

app.use(cors({
  origin: [
    "http://localhost:4200",
    "https://wfprev-dev.nrs.gov.bc.ca",
    "https://wfprev-tst.nrs.gov.bc.ca",
    "https://wfprev.nrs.gov.bc.ca"
  ]
}));

// Decode base64 body if necessary (Lambda -> API Gateway delivers binary
// payloads base64-encoded); must run before any body-consuming middleware.
app.use((req, res, next) => {
  const event = req.apiGateway?.event;
  if (event?.isBase64Encoded && event.body) {
    const buff = Buffer.from(event.body, "base64");
    req.body = buff;
    // Header values are strings; record the decoded byte length.
    req.headers["content-length"] = String(buff.length);
  }
  next();
});

// Minimal request logger.
app.use((req, res, next) => {
  console.log(`[${new Date().toISOString()}] ${req.method} ${req.url}`);
  next();
});

app.use(fileUpload());

// Lambda's only writable filesystem lives under /tmp.
const uploadDir = "/tmp/uploads";

if (!fs.existsSync(uploadDir)) {
  // recursive: true also creates /tmp if it is somehow missing, and is a
  // no-op race-safe choice if the directory appears concurrently.
  fs.mkdirSync(uploadDir, { recursive: true });
}
/**
 * Zip-slip guard: returns true only when filePath resolves to a location
 * inside destinationPath (or destinationPath itself).
 *
 * Uses path.resolve + path.relative containment instead of a plain
 * startsWith prefix check, because a prefix check wrongly accepts sibling
 * directories that share a name prefix (e.g. "/tmp/uploads/foo-evil/x"
 * starts with "/tmp/uploads/foo") while this rejects them.
 *
 * @param {string} filePath - Candidate extraction target path.
 * @param {string} destinationPath - Directory the file must stay within.
 * @returns {boolean} true when filePath is contained in destinationPath.
 */
function isValidPath(filePath, destinationPath) {
  const resolvedDest = path.resolve(destinationPath);
  const resolvedTarget = path.resolve(filePath);
  const rel = path.relative(resolvedDest, resolvedTarget);
  // Inside the destination iff the relative path neither climbs out ("..")
  // nor lands on a different root (absolute). rel === "" means the
  // destination itself, which is allowed.
  return !rel.startsWith("..") && !path.isAbsolute(rel);
}
40
59
41
- // Extract the handler function so it can be tested independently
42
60
/**
 * POST /upload handler.
 *
 * Accepts a zipped File Geodatabase, extracts it under uploadDir (guarding
 * against zip-slip via isValidPath), reads every layer's geometries with
 * GDAL and responds with them as a JSON array. Temporary files are removed
 * shortly after the response is sent.
 *
 * @param {object} req - Express request; expects req.files.file from express-fileupload.
 * @param {object} res - Express response.
 * @returns {Promise<void>} Sends 400 on bad input, 500 on processing errors,
 *   otherwise a JSON array of GeoJSON geometry objects.
 */
async function handleUpload(req, res) {
  if (!req.files || !req.files.file) {
    return res.status(400).send("No file uploaded.");
  }

  const fileName = req.files.file.name;
  if (!fileName.toLowerCase().endsWith(".zip")) {
    return res.status(400).send("Only ZIP files are allowed.");
  }

  // basename() strips any client-supplied directory components so the upload
  // cannot be written outside uploadDir (e.g. a name of "../../evil.zip").
  const safeName = path.basename(fileName);
  const zipPath = path.join(uploadDir, safeName);
  const unzipPath = path.join(uploadDir, path.basename(safeName, ".zip"));

  await req.files.file.mv(zipPath);

  try {
    await extract(zipPath, {
      dir: unzipPath,
      // Validate every entry's destination before extraction to prevent
      // zip-slip; throwing here aborts the extraction.
      onEntry: (entry) => {
        const destPath = path.join(unzipPath, entry.fileName);
        if (!isValidPath(destPath, unzipPath)) {
          throw new Error(`Zip slip detected: ${entry.fileName}`);
        }
      }
    });
  } catch (err) {
    console.error("Extraction failed:", err);
    // Best-effort removal of the uploaded archive; log instead of silently
    // swallowing so cleanup problems are visible.
    try {
      fs.unlinkSync(zipPath);
    } catch (cleanupErr) {
      console.error("Cleanup after failed extraction failed:", cleanupErr);
    }
    return res.status(500).send("Extraction failed.");
  }

  let extractedFiles;
  try {
    extractedFiles = fs.readdirSync(unzipPath);
  } catch (err) {
    console.error("Failed to read extracted dir:", err);
    return res.status(500).send("Could not read extracted files.");
  }

  // A File Geodatabase is a directory whose name ends in ".gdb".
  const gdbFolder = extractedFiles.find(f => f.endsWith(".gdb"));
  if (!gdbFolder) {
    return res.status(400).send("No .gdb found.");
  }

  const gdbPath = path.join(unzipPath, gdbFolder);

  let dataset;
  const results = [];

  try {
    dataset = gdal.open(gdbPath);
    dataset.layers.forEach((layer) => {
      layer.features.forEach((feature) => {
        const geom = feature.getGeometry();
        if (geom) results.push(JSON.parse(geom.toJSON()));
      });
    });

    // Close before cleanup so GDAL releases its file handles on the .gdb.
    dataset.close();
    dataset = null;

    res.json(results);

    // Post-response cleanup: delayed so the response is flushed first.
    setTimeout(() => {
      try {
        if (fs.existsSync(zipPath)) fs.unlinkSync(zipPath);

        // Recursive delete of the extracted tree.
        const deleteFolderRecursive = (dirPath) => {
          if (fs.existsSync(dirPath)) {
            fs.readdirSync(dirPath).forEach((file) => {
              const curPath = path.join(dirPath, file);
              if (fs.lstatSync(curPath).isDirectory()) {
                deleteFolderRecursive(curPath);
              } else {
                fs.unlinkSync(curPath);
              }
            });
            fs.rmdirSync(dirPath);
          }
        };
        deleteFolderRecursive(unzipPath);
      } catch (cleanupErr) {
        console.error("Cleanup failed:", cleanupErr);
      }
    }, 2000);
  } catch (err) {
    console.error("Error reading GDB:", err);
    if (dataset) {
      try {
        dataset.close();
      } catch (closeErr) {
        console.error("Error closing dataset:", closeErr);
      }
    }
    return res.status(500).send("Failed to read GDB.");
  }
}
181
156
182
- // TO-DO - update this to use Github secrets in WFPREV-402 terraform tasks
183
- app . use ( cors ( {
184
- origin : [ 'http://localhost:4200' , 'https://wfprev-dev.nrs.gov.bc.ca' , 'https://wfprev-tst.nrs.gov.bc.ca/' , 'https://wfprev.nrs.gov.bc.ca' ]
185
- } ) ) ;
186
-
187
- // Set up route
188
157
app . post ( "/upload" , handleUpload ) ;
189
158
190
159
if ( require . main === module ) {
191
- // Start the server only if the file is run directly, not during tests
192
- const server = app . listen ( 3000 , ( ) => console . log ( "Server running on port 3000" ) ) ;
160
+ app . listen ( 3000 , ( ) => console . log ( "Server running on port 3000" ) ) ;
193
161
}
194
-
162
+
195
163
module . exports = {
196
- app,
197
- handleUpload
198
- }
164
+ app,
165
+ handleUpload
166
+ } ;
0 commit comments