 import csv
 import os
-from concurrent.futures import ThreadPoolExecutor
+import logging
+from concurrent.futures import ThreadPoolExecutor, as_completed
 import configparser
 import pynetbox
 from tqdm import tqdm
 from netbox_connection import connect_to_netbox
+import urllib3
+
+urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
 
 # Get the directory of the current script
 script_dir = os.path.dirname(os.path.abspath(__file__))
 
+# Set up logging
+logging.basicConfig(
+    level=logging.INFO,
+    format='%(asctime)s - %(levelname)s - %(message)s',
+    handlers=[
+        logging.FileHandler(os.path.join(script_dir, 'netbox_import.log'))
+    ]
+)
+logger = logging.getLogger(__name__)
+
 def process_row(row, pbar):
     """
     Process a single row from the CSV file and update/create IP addresses in Netbox.
-
     Args:
     - row (dict): A dictionary representing a single row from the CSV file.
     - pbar (tqdm.tqdm): Progress bar to update the progress of processing rows.
     """
-    # Convert 'tags' from a comma-separated string to a list of dictionaries
-    tags_list = [{'name': tag.strip()} for tag in row['tags'].split(',')]
+    try:
+        logger.info(f"Processing address: {row['address']}")
+
+        # Convert 'tags' from a comma-separated string to a list of dictionaries
+        tags_list = [{'name': tag.strip()} for tag in row['tags'].split(',')]
+        logger.debug(f"Tags for {row['address']}: {tags_list}")
 
-    # Assuming you're writing to the 'ipam' endpoint, replace with the correct endpoint if not
-    existing_address = netbox.ipam.ip_addresses.get(address=row['address'])
-
-    if existing_address:
-        # Update the existing address
-        existing_address.status = row['status']
-        existing_address.custom_fields = {'scantime': row['scantime']}  # Changed 'description' to 'scantime'
-        existing_address.dns_name = row['dns_name']
-        existing_address.tags = tags_list
-        if row['tenant'] != 'N/A':  # Check if tenant is not 'N/A'
-            existing_address.tenant = {'name': row['tenant']}
-        if row['VRF'] != 'N/A':  # Check if VRF is not 'N/A'
-            existing_address.vrf = {'name': row['VRF']}
-        existing_address.save()
-    else:
-        try:
-            # Create a new address if it doesn't exist
-            tenant_data = {'name': row['tenant']} if row['tenant'] != 'N/A' else None
-            vrf_data = {'name': row['VRF']} if row['VRF'] != 'N/A' else None
-            netbox.ipam.ip_addresses.create(
-                address=row['address'],
-                status=row['status'],
-                custom_fields={'scantime': row['scantime']},  # Changed 'description' to 'scantime'
-                dns_name=row['dns_name'],
-                tags=tags_list,
-                tenant=tenant_data,
-                vrf=vrf_data
-            )
-        except pynetbox.core.query.RequestError as e:
-            # Handle duplicate address error
-            if 'Duplicate IP address' in str(e):
-                None
-            else:
-                # Propagate other errors
+        # Attempting to get existing address
+        existing_address = netbox.ipam.ip_addresses.get(address=row['address'])
+
+        if existing_address:
+            logger.info(f"Updating existing address: {row['address']}")
+            try:
+                # Update the existing address
+                existing_address.status = row['status']
+                existing_address.custom_fields = {'scantime': row['scantime']}
+                existing_address.dns_name = row['dns_name']
+                existing_address.tags = tags_list
+
+                if row['tenant'] != 'N/A':
+                    existing_address.tenant = {'name': row['tenant']}
+                if row['VRF'] != 'N/A':
+                    existing_address.vrf = {'name': row['VRF']}
+
+                existing_address.save()
+                logger.info(f"Successfully updated address: {row['address']}")
+
+            except Exception as e:
+                logger.error(f"Error updating address {row['address']}: {str(e)}")
                 raise
-
-    # Update progress bar for each processed row
-    pbar.update(1)
+
+        else:
+            logger.info(f"Creating new address: {row['address']}")
+            try:
+                # Create a new address if it doesn't exist
+                tenant_data = {'name': row['tenant']} if row['tenant'] != 'N/A' else None
+                vrf_data = {'name': row['VRF']} if row['VRF'] != 'N/A' else None
+
+                netbox.ipam.ip_addresses.create(
+                    address=row['address'],
+                    status=row['status'],
+                    custom_fields={'scantime': row['scantime']},
+                    dns_name=row['dns_name'],
+                    tags=tags_list,
+                    tenant=tenant_data,
+                    vrf=vrf_data
+                )
+                logger.info(f"Successfully created address: {row['address']}")
+
+            except pynetbox.core.query.RequestError as e:
+                if 'Duplicate IP address' in str(e):
+                    logger.warning(f"Duplicate IP address found: {row['address']}")
+                else:
+                    logger.error(f"Error creating address {row['address']}: {str(e)}")
+                    raise
+            except Exception as e:
+                logger.error(f"Unexpected error creating address {row['address']}: {str(e)}")
+                raise
+
+    except Exception as e:
+        logger.error(f"Failed to process row for address {row.get('address', 'unknown')}: {str(e)}")
+        raise
+    finally:
+        # Update progress bar for each processed row
+        pbar.update(1)
 
 def write_data_to_netbox(url, token, csv_file):
     """
     Write data from a CSV file to Netbox.
-
     Args:
     - url (str): The base URL of the Netbox instance.
     - token (str): The authentication token for accessing the Netbox API.
     - csv_file (str): Path to the CSV file containing data to be written to Netbox.
     """
     global netbox
-    netbox = connect_to_netbox(url, token)
-
-    csv_file_path = os.path.join(script_dir, csv_file)
-    with open(csv_file_path, 'r') as file:
-        reader = csv.DictReader(file)
-        rows = list(reader)
-
-    total_rows = len(rows)
-    with tqdm(total=total_rows, desc="Processing Rows") as pbar:
-        with ThreadPoolExecutor(max_workers=5) as executor:  # Adjust max_workers as needed
-            futures = [executor.submit(process_row, row, pbar) for row in rows]
-            # Wait for all futures to complete
-            for future in futures:
-                future.result()
-
-# Read URL and token from var.ini
-config = configparser.ConfigParser()
-config.read(os.path.join(script_dir, 'var.ini'))
-url = config['credentials']['url']
-token = config['credentials']['token']
+    try:
+        logger.info("Connecting to Netbox...")
+        netbox = connect_to_netbox(url, token)
+        logger.info("Successfully connected to Netbox")
+
+        csv_file_path = os.path.join(script_dir, csv_file)
+        logger.info(f"Reading CSV file: {csv_file_path}")
+
+        with open(csv_file_path, 'r') as file:
+            reader = csv.DictReader(file)
+            rows = list(reader)
+            total_rows = len(rows)
+            logger.info(f"Found {total_rows} rows to process")
+
+        with tqdm(total=total_rows, desc="Processing Rows") as pbar:
+            with ThreadPoolExecutor(max_workers=5) as executor:
+                # Submit all tasks and store futures
+                futures = [executor.submit(process_row, row, pbar) for row in rows]
+
+                # Wait for completion and handle any exceptions
+                for future in as_completed(futures):
+                    try:
+                        future.result()  # This will raise any exceptions from the future
+                    except Exception as e:
+                        logger.error(f"Error processing row: {str(e)}")
+                        # Continue processing other rows even if one fails
+                        continue
+
+        logger.info("Completed processing all rows")
+
+    except Exception as e:
+        logger.error(f"Fatal error in write_data_to_netbox: {str(e)}")
+        raise
 
-write_data_to_netbox(url, token, 'ipam_addresses.csv')
+if __name__ == "__main__":
+    try:
+        # Read URL and token from var.ini
+        config = configparser.ConfigParser()
+        config.read(os.path.join(script_dir, 'var.ini'))
+        url = config['credentials']['url']
+        token = config['credentials']['token']
+
+        logger.info("Starting Netbox import process")
+        write_data_to_netbox(url, token, 'ipam_addresses.csv')
+        logger.info("Netbox import process completed successfully")
+
+    except Exception as e:
+        logger.error(f"Script failed: {str(e)}")
+        raise
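
For reference, the script reads its Netbox URL and API token from a var.ini file located next to the script, using a [credentials] section with url and token keys (as shown in the config.read block above). A minimal sketch of that file, with placeholder values rather than anything from this commit:

[credentials]
url = https://netbox.example.com
token = <your-netbox-api-token>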