Commit

chore(cfe): more detail to loader
danellecline committed Oct 21, 2024
1 parent b1d9062 commit 9bcec3c
Showing 1 changed file with 38 additions and 6 deletions.
44 changes: 38 additions & 6 deletions aipipeline/projects/cfe/load_isiis_mine.py
@@ -1,9 +1,14 @@
import logging
import os
import subprocess
import sys
from datetime import datetime
from pathlib import Path

import pandas as pd

from aipipeline.docker.utils import run_docker

logger = logging.getLogger(__name__)
formatter = logging.Formatter("%(asctime)s %(levelname)s %(message)s")
# Also log to the console
@@ -18,6 +23,12 @@
handler.setLevel(logging.DEBUG)
logger.addHandler(handler)

TATOR_TOKEN = os.getenv("TATOR_TOKEN")

if not TATOR_TOKEN:
    logger.error("TATOR_TOKEN environment variable not set")
    sys.exit(1)

if __name__ == "__main__":
    import multiprocessing
    import time
@@ -30,15 +41,36 @@

    # Read in all the csv files into a pandas dataframe
    # This will be used to filter the images that need to be processed
    df = pd.concat([pd.read_csv(f) for f in Path.cwd().rglob("*.csv")], ignore_index=True)
    df_all = pd.DataFrame()
    for f in (out_path / 'csv').rglob("*.csv"):
        logger.info(f"Reading {f}")
        if pd.read_csv(f).shape[0] == 0:
            logger.info(f"Skipping {f} as it is empty")
            continue
        df = pd.read_csv(f)
        df_all = pd.concat([df_all, df], ignore_index=True)

    # Find the unique image paths
    image_paths = df['image_path'].unique()
    # Find the unique image paths and load the media
    image_paths = df_all['image_path'].unique()
    project = "902111-CFE"
    section = "mine_depth_v1"

    # For each image path, load the media
    for image_path in image_paths:
        # Load the media
        pass
        args = [
            "load",
            "images",
            "--input",
            image_path,
            "--config",
            f"/tmp/{project}/config.yml",
            "--token",
            TATOR_TOKEN,
            "--section",
            section,
        ]
        command = "python -m aidata " + " ".join(args)
        logger.info(f"Running {' '.join(args)}")
        subprocess.run(command, shell=True)

    time_end = time.time()
    logger.info(f"total processing time: {time_end - time_start}")
