Initial commit: adds ods_to_json.py (new file, 116 lines).
|
||||
from datetime import datetime
|
||||
import pandas as pd # type: ignore
|
||||
import json
|
||||
import os
|
||||
|
||||
# Load the ODS spreadsheet; the 'odf' engine lets pandas read
# OpenDocument files (requires the odfpy package at runtime).
df = pd.read_excel('data.ods', engine='odf')

# Accumulator for the graph's node dictionaries (one per spreadsheet row).
nodes = []
|
||||
|
||||
def clean_dict(d):
    """Drop keys whose values are missing (None or NaN, per ``pd.notna``).

    List values are filtered element-wise; a list left empty after
    filtering is dropped along with its key.  Nested dicts (e.g. the
    "location" sub-dict) are kept as-is and NOT recursed into.

    Note: ``pd.notna`` already treats ``None`` as missing, so no
    separate ``is not None`` check is needed (the original had
    redundant ones).  ``pd.notna`` on a list returns an array, which
    is why lists must be handled before the scalar branch.
    """
    cleaned = {}
    for key, value in d.items():
        if isinstance(value, list):
            # Filter per item; pd.notna(list) would yield an array, not a bool.
            kept = [item for item in value if pd.notna(item)]
            if kept:  # only keep non-empty lists
                cleaned[key] = kept
        elif pd.notna(value):
            cleaned[key] = value
    return cleaned
|
||||
|
||||
# Build one node dict per spreadsheet row.
for _, row in df.iterrows():
    # The "image" cell may be empty/NaN, a single path, or a
    # comma-separated list of paths.
    raw_image = str(row["image"]).strip() if pd.notna(row["image"]) else None

    if not raw_image:
        images = None  # empty or NaN cell -> no images
    elif ',' in raw_image:
        # Multiple paths: split on commas and trim whitespace around each.
        images = [part.strip() for part in raw_image.split(',')]
    else:
        images = raw_image  # single path stays a plain string

    node = {
        "id": str(row["id"]),  # graph ids must be strings
        "name": row["name"],
        "description": row["description"],
        "category": row["category"],
        "location": {
            "country": row["country"],
            "state": row["state"],
            "city": row["city"],
        },
        # First whitespace-separated token of the country cell: the
        # 3-letter code without the flag emoji.
        "group": row["country"].split()[0],
        "image": images,  # single string, list, or None
        "link": row["link"],
    }

    # Strip missing values before storing the node.
    nodes.append(clean_dict(node))
|
||||
|
||||
# Index node ids by location so same-place nodes can be linked.
# Keys are '|'-joined hierarchical paths: country, country|state,
# and country|state|city — one dict holds all three levels.
location_groups = {}

for node in nodes:
    loc = node["location"]
    # 3-letter country code: first whitespace-separated token (the
    # cell also carries a flag emoji after it).
    country_code = loc["country"].split()[0]
    hierarchy_keys = (
        country_code,
        f"{country_code}|{loc['state']}",
        f"{country_code}|{loc['state']}|{loc['city']}",
    )
    for key in hierarchy_keys:
        location_groups.setdefault(key, []).append(node["id"])
|
||||
|
||||
# Accumulator for the graph's edges, filled by create_links below.
links = []


def create_links(node_list, strength):
    """Append a link of the given strength for every unordered pair
    of ids in node_list to the module-level ``links`` list."""
    for idx, source_id in enumerate(node_list):
        for target_id in node_list[idx + 1:]:
            links.append({
                "source": source_id,
                "target": target_id,
                "strength": strength
            })
|
||||
|
||||
# Apply links at each hierarchy level exactly once.
#
# BUG FIX: the original three loops each iterated over the ENTIRE
# location_groups dict — which mixes country, state, and city keys —
# so every group generated duplicate links at strengths 3, 2 AND 1.
# A key encodes its level by the number of '|' separators:
#   "USA"           -> country level -> strength 1 (weakest)
#   "USA|CA"        -> state level   -> strength 2 (medium)
#   "USA|CA|SF"     -> city level    -> strength 3 (strongest)
# NOTE(review): assumes country/state/city names contain no '|' —
# confirm against the spreadsheet data.
for group_key, member_ids in location_groups.items():
    if len(member_ids) > 1:
        create_links(member_ids, strength=group_key.count('|') + 1)
|
||||
|
||||
# Final JSON structure consumed by the graph front-end.
graph_data = {"nodes": nodes, "links": links}

# Save to a timestamped JSON file.
# Format: graph_data__YYYYMMDD_HHMMSS.json
# (comment fixed: the code has always used a double underscore)
timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
output_file = f"graph_data__{timestamp}.json"
# Explicit UTF-8: the data carries emoji (country flags), and the
# platform default encoding is not guaranteed to handle them.
with open(output_file, "w", encoding="utf-8") as f:
    json.dump(graph_data, f, indent=4)

# Success feedback; afplay is macOS-only (fails silently elsewhere
# since os.system ignores the non-zero exit status).
print(f"\n✅ JSON file '{output_file}' generated successfully!")
os.system("afplay /System/Library/Sounds/Glass.aiff")
|
||||
Reference in New Issue
Block a user