
Commit f32bd73

Added filepath check
1 parent 5cd38a4 commit f32bd73

3 files changed: +34 additions, -19 deletions

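The change is the same in all three scripts: every relative file path is now joined onto the directory of the script itself, so the tools behave the same regardless of the working directory they are launched from. A minimal sketch of the pattern the commit applies (the CSV name is one used in this repo; the snippet itself is illustrative, not part of the commit):

    import os

    # Resolve paths against the script's own location instead of the current working directory
    script_dir = os.path.dirname(os.path.abspath(__file__))
    csv_path = os.path.join(script_dir, 'ipam_prefixes.csv')
    with open(csv_path, 'r') as file:
        print(file.read())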

netbox_push.py

Lines changed: 9 additions & 4 deletions
@@ -1,9 +1,13 @@
 import csv
-import pynetbox
-from netbox_connection import connect_to_netbox
+import os
 from concurrent.futures import ThreadPoolExecutor
 import configparser
+import pynetbox
 from tqdm import tqdm
+from netbox_connection import connect_to_netbox
+
+# Get the directory of the current script
+script_dir = os.path.dirname(os.path.abspath(__file__))
 
 def process_row(row, pbar):
     """
@@ -67,7 +71,8 @@ def write_data_to_netbox(url, token, csv_file):
     global netbox
     netbox = connect_to_netbox(url, token)
 
-    with open(csv_file, 'r') as file:
+    csv_file_path = os.path.join(script_dir, csv_file)
+    with open(csv_file_path, 'r') as file:
         reader = csv.DictReader(file)
         rows = list(reader)
 
@@ -81,7 +86,7 @@ def write_data_to_netbox(url, token, csv_file):
 
 # Read URL and token from var.ini
 config = configparser.ConfigParser()
-config.read('var.ini')
+config.read(os.path.join(script_dir, 'var.ini'))
 url = config['credentials']['url']
 token = config['credentials']['token']
 
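With this change, var.ini is also looked up next to netbox_push.py rather than in the current working directory. A sketch of the expected layout, assuming var.ini carries a [credentials] section with url and token as the code reads them (the values shown are placeholders):

    # var.ini, placed in the same directory as netbox_push.py (assumed layout):
    #   [credentials]
    #   url = https://netbox.example.com
    #   token = 0123456789abcdef0123456789abcdef

    import configparser
    import os

    script_dir = os.path.dirname(os.path.abspath(__file__))
    config = configparser.ConfigParser()
    config.read(os.path.join(script_dir, 'var.ini'))
    url = config['credentials']['url']
    token = config['credentials']['token']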

nmap_compare.py

Lines changed: 10 additions & 6 deletions
@@ -2,6 +2,9 @@
 from datetime import datetime
 import os
 
+# Get the directory of the current script
+script_dir = os.path.dirname(os.path.abspath(__file__))
+
 def get_file_path(directory, date_time):
     """
     Generate a file path based on the directory and date.
@@ -13,7 +16,7 @@ def get_file_path(directory, date_time):
     Returns:
     - file_path (str): The full file path based on the directory and date.
     """
-    return os.path.join(directory, f'nmap_results_{date_time.strftime("%Y-%m-%d_%H-%M-%S")}.csv')
+    return os.path.join(script_dir, directory, f'nmap_results_{date_time.strftime("%Y-%m-%d_%H-%M-%S")}.csv')
 
 def get_latest_files(directory, num_files=2):
     """
@@ -26,19 +29,20 @@ def get_latest_files(directory, num_files=2):
     Returns:
     - files (list): The list of latest CSV file names.
     """
-    files = [f for f in os.listdir(directory) if f.endswith('.csv')]
-    files.sort(key=lambda x: os.path.getmtime(os.path.join(directory, x)), reverse=True)
+    full_directory = os.path.join(script_dir, directory)
+    files = [f for f in os.listdir(full_directory) if f.endswith('.csv')]
+    files.sort(key=lambda x: os.path.getmtime(os.path.join(full_directory, x)), reverse=True)
     return files[:num_files]
 
 # Directory for result files
-directory = 'results/'
+directory = 'results'
 
 # Get the two latest file paths
 latest_files = get_latest_files(directory)
 file_paths = [get_file_path(directory, datetime.strptime(file_name[13:32], "%Y-%m-%d_%H-%M-%S")) for file_name in latest_files]
 
 # Output file path
-output_file_path = 'ipam_addresses.csv'
+output_file_path = os.path.join(script_dir, 'ipam_addresses.csv')
 
 def read_csv(file_path):
     """
@@ -67,7 +71,7 @@ def write_csv(data, file_path):
     - file_path (str): The path to the output CSV file.
     """
     with open(file_path, 'w', newline='') as file:
-        fieldnames = ['address', 'dns_name', 'status', 'scantime', 'tags', 'tenant', 'VRF']  # Added 'VRF' to fieldnames
+        fieldnames = ['address', 'dns_name', 'status', 'scantime', 'tags', 'tenant', 'VRF']
         writer = csv.DictWriter(file, fieldnames=fieldnames)
 
         # Write header
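The file_name[13:32] slice above depends on the nmap_results_<timestamp>.csv names that get_file_path produces: the prefix 'nmap_results_' is 13 characters and the %Y-%m-%d_%H-%M-%S timestamp is 19, so characters 13 through 31 hold the timestamp. A quick illustration with a made-up file name:

    from datetime import datetime

    # Example name in the format produced by get_file_path (the timestamp is invented for illustration)
    file_name = 'nmap_results_2024-01-31_12-00-00.csv'
    stamp = file_name[13:32]  # '2024-01-31_12-00-00'
    print(datetime.strptime(stamp, "%Y-%m-%d_%H-%M-%S"))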

nmap_scan_multi_dns.py

Lines changed: 15 additions & 9 deletions
@@ -14,6 +14,8 @@
 # Lock for writing to CSV file
 csv_lock = threading.Lock()
 
+# Get the directory of the current script
+script_dir = os.path.dirname(os.path.abspath(__file__))
 
 def read_from_csv(filename):
     """
@@ -25,7 +27,8 @@ def read_from_csv(filename):
     Returns:
     - data (list): A list of dictionaries representing rows from the CSV file.
     """
-    with open(filename, 'r') as file:
+    filepath = os.path.join(script_dir, filename)
+    with open(filepath, 'r') as file:
         reader = csv.DictReader(file)
         data = [row for row in reader]
     return data
@@ -42,8 +45,9 @@ def remove_scanned_prefixes(data, scanned_prefixes):
     updated_data = [row for row in data if row['Prefix'] not in scanned_prefixes]
 
     # Rewrite the updated data to the CSV file
-    with open('ipam_prefixes.csv', 'w', newline='') as file:
-        fieldnames = ['Prefix', 'VRF', 'Status', 'Tags', 'Tenant']  # Added 'VRF' to fieldnames
+    filepath = os.path.join(script_dir, 'ipam_prefixes.csv')
+    with open(filepath, 'w', newline='') as file:
+        fieldnames = ['Prefix', 'VRF', 'Status', 'Tags', 'Tenant']
         writer = csv.DictWriter(file, fieldnames=fieldnames)
         writer.writeheader()
         writer.writerows(updated_data)
@@ -110,12 +114,15 @@ def run_nmap_on_prefixes(data, output_folder):
     results = []
     scanned_prefixes = []
 
+    # Create the full path for the output folder
+    output_folder_path = os.path.join(script_dir, output_folder)
+
     # Filter rows to scan only those with status 'active' and without the tag 'Disable Automatic Scanning'
     rows_to_scan = [row for row in data if row['Status'] == 'active' and 'Disable Automatic Scanning' not in row['Tags']]
 
     script_start_time = datetime.now()  # Get the script start time
 
-    with ThreadPoolExecutor(max_workers=5) as executor:  # Adjust the max_workers parameter based on your system's capabilities
+    with ThreadPoolExecutor(max_workers=5) as executor:
         # Use executor.map to asynchronously run the scans and get results
         futures = {executor.submit(run_nmap_on_prefix, row['Prefix'], row['Tenant'], row['VRF']): row for row in rows_to_scan}
 
@@ -125,8 +132,7 @@ def run_nmap_on_prefixes(data, output_folder):
             with csv_lock:
                 results.extend(prefix_results)
                 scanned_prefixes.append(futures[future]['Prefix'])
-                write_results_to_csv(prefix_results, output_folder, script_start_time)  # Pass script start time
-
+                write_results_to_csv(prefix_results, output_folder_path, script_start_time)
 
     remove_scanned_prefixes(data, scanned_prefixes)
     return results
@@ -151,8 +157,8 @@ def write_results_to_csv(results, output_folder, script_start_time):
     # Check if the file is empty
     is_empty = not os.path.exists(output_filename) or os.stat(output_filename).st_size == 0
 
-    with open(output_filename, 'a', newline='') as file:  # Use 'a' (append) mode to add results to the file
-        fieldnames = ['address', 'dns_name', 'status', 'tags', 'tenant', 'VRF', 'scantime']  # Added 'VRF' to fieldnames
+    with open(output_filename, 'a', newline='') as file:
+        fieldnames = ['address', 'dns_name', 'status', 'tags', 'tenant', 'VRF', 'scantime']
         writer = csv.DictWriter(file, fieldnames=fieldnames)
 
         # Add headers if the file is empty
@@ -165,4 +171,4 @@ def write_results_to_csv(results, output_folder, script_start_time):
 if __name__ == "__main__":
     data = read_from_csv('ipam_prefixes.csv')
     output_folder = 'results'
-    run_nmap_on_prefixes(data, output_folder)
+    run_nmap_on_prefixes(data, output_folder)
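write_results_to_csv appends to the same per-run output file and writes the CSV header only when the file does not exist yet or is empty. A minimal sketch of that append-plus-header-guard pattern, using the fieldnames from the diff (append_rows is a hypothetical helper, not a function in this repo):

    import csv
    import os

    def append_rows(output_filename, rows):
        # Hypothetical helper: write the header only if the file is new or empty, then append the rows
        is_empty = not os.path.exists(output_filename) or os.stat(output_filename).st_size == 0
        with open(output_filename, 'a', newline='') as file:
            fieldnames = ['address', 'dns_name', 'status', 'tags', 'tenant', 'VRF', 'scantime']
            writer = csv.DictWriter(file, fieldnames=fieldnames)
            if is_empty:
                writer.writeheader()
            writer.writerows(rows)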
