Directory Browsing
TL;DR
Browse and navigate remote file systems using the CMDOP Python SDK. List directory contents with sorting and filtering, retrieve detailed file metadata, recursively search for files by name or date, visualize directory trees, monitor real-time file changes, and check disk usage and free space — all without shell commands.
Navigate and explore remote file systems.
How do I list files in a remote directory?
from cmdop import AsyncCMDOPClient
async with AsyncCMDOPClient.remote(api_key="cmd_xxx") as client:
# List all entries in the /var/log directory on the remote server
entries = await client.files.list("my-server", "/var/log")
for entry in entries:
print(f"{entry.name} - {entry.size} bytes")

What properties does each directory entry have?
Each entry has:
entry = entries[0]
# Core file identification
entry.name # "app.log"
entry.path # "/var/log/app.log"
entry.size # 1234567 (bytes)
entry.mode # 0o644
# Type checks
entry.is_dir # False
entry.is_file # True
entry.is_symlink # False
# Timestamps and ownership
entry.modified_at # datetime
entry.created_at # datetime
entry.owner # "root"
entry.group # "root"

How do I get a detailed listing with hidden files?
# Include hidden files (dotfiles) in the listing
entries = await client.files.list(
"my-server",
"/home/deploy",
include_hidden=True
)
# Retrieve extended metadata for each entry (similar to ls -la)
entries = await client.files.list(
"my-server",
"/var/log",
detailed=True
)
for e in entries:
# Convert numeric mode to octal string for display
mode = oct(e.mode)[2:]
print(f"{mode} {e.owner:10} {e.size:10} {e.name}")

How do I list files recursively with a pattern filter?
# Traverse all subdirectories and list every file
entries = await client.files.list(
"my-server",
"/app",
recursive=True
)
# Recursively list only files matching a glob pattern
entries = await client.files.list(
"my-server",
"/app",
recursive=True,
pattern="*.py" # Only Python files
)

How do I sort and filter directory listings?
# Sort entries by file size in descending order (largest first)
entries = await client.files.list(
"my-server",
"/var/log",
sort_by="size",
sort_desc=True
)
# Sort entries by last-modified timestamp (most recent first)
entries = await client.files.list(
"my-server",
"/var/log",
sort_by="modified",
sort_desc=True # Most recent first
)
# Filter to only return files larger than 1 MB
entries = await client.files.list(
"my-server",
"/var/log",
min_size=1024 * 1024 # Files > 1MB
)

How do I find files by name, date, or size?
# Search for files matching a glob pattern across subdirectories
files = await client.files.find(
"my-server",
"/app",
pattern="*.log"
)
# Find files modified within the last 24 hours
from datetime import datetime, timedelta
files = await client.files.find(
"my-server",
"/var/log",
modified_after=datetime.now() - timedelta(hours=24)
)
# Find files larger than 100 MB anywhere on the server
files = await client.files.find(
"my-server",
"/",
min_size=100 * 1024 * 1024 # > 100MB
)

How do I display a directory tree?
# Fetch a tree structure up to 3 levels deep
tree = await client.files.tree("my-server", "/app", depth=3)
# Recursively print the tree with indentation
def print_tree(node, indent=0):
    """Recursively print *node* and every descendant, one extra space of indent per depth level."""
    print(f"{' ' * indent}{node.name}")
    for subtree in node.children:
        print_tree(subtree, indent + 1)
print_tree(tree)

Output:
app
src
main.py
utils.py
lib
helpers.py
config.yaml
requirements.txt

How do I check disk usage for a directory?
# Get total size of all files in the directory
size = await client.files.du("my-server", "/var/log")
print(f"Log directory: {size / 1024 / 1024:.2f} MB")
# Get per-subdirectory size breakdown and show top 10 largest
usage = await client.files.du("my-server", "/app", breakdown=True)
for path, size in sorted(usage.items(), key=lambda x: -x[1])[:10]:
print(f"{size / 1024 / 1024:.2f} MB {path}")

How do I check available disk space?
# Query filesystem disk space (similar to the df command)
disk = await client.files.df("my-server", "/")
print(f"Total: {disk.total / 1024**3:.1f} GB")
print(f"Used: {disk.used / 1024**3:.1f} GB")
print(f"Free: {disk.free / 1024**3:.1f} GB")
print(f"Percent: {disk.percent}%")

How do I watch a directory for real-time file changes?
# React to live file-system events as they stream in from the remote server
async def watch_logs():
    """Watch /var/log on my-server and report each created, modified, or deleted file."""
    labels = {"created": "New file", "modified": "Changed", "deleted": "Deleted"}
    async for event in client.files.watch("my-server", "/var/log"):
        label = labels.get(event.type)
        if label is not None:
            print(f"{label}: {event.path}")
# Run the watcher with a 60-second timeout
await asyncio.wait_for(watch_logs(), timeout=60)

How do I handle symlinks when listing files?
# List entries and resolve symlinks to their actual targets
entries = await client.files.list(
"my-server",
"/usr/bin",
follow_symlinks=True
)
# Read where a specific symlink points to
target = await client.files.readlink("my-server", "/usr/bin/python")
print(f"python -> {target}") # python -> python3.11

What are common directory browsing patterns?
How do I find recent log files?
# Search for .log files modified in the last 7 days
logs = await client.files.find(
"my-server",
"/var/log",
pattern="*.log",
modified_after=datetime.now() - timedelta(days=7)
)
for log in logs:
print(f"{log.name}: {log.size / 1024:.1f} KB")

How do I clean up old temporary files?
# Find files that haven't been modified in over 30 days
old_files = await client.files.find(
"my-server",
"/tmp",
modified_before=datetime.now() - timedelta(days=30)
)
# Calculate how much space the old files occupy
total_size = sum(f.size for f in old_files)
print(f"Found {len(old_files)} files, {total_size / 1024 / 1024:.1f} MB")
# Delete each stale file
for f in old_files:
await client.files.delete("my-server", f.path)

How do I find duplicate files on a remote server?
from collections import defaultdict
# Group all files by their size to identify potential duplicates
by_size = defaultdict(list)
files = await client.files.find("my-server", "/data", min_size=1024)
for f in files:
by_size[f.size].append(f)
# For files with matching sizes, compare checksums to confirm duplicates
for size, group in by_size.items():
if len(group) > 1:
checksums = {}
for f in group:
cs = await client.files.checksum("my-server", f.path)
if cs in checksums:
print(f"Duplicate: {f.path} == {checksums[cs]}")
checksums[cs] = f.path

How do I get aggregate statistics for a directory?
async def dir_stats(hostname, path):
    """Return aggregate statistics for everything under *path* on *hostname*.

    The result dict contains: total_files, total_dirs, total_size (bytes),
    and by_extension (extension -> file count; "no-ext" for extensionless names).
    """
    # Recursively fetch all entries under the given path
    entries = await client.files.list(hostname, path, recursive=True)
    # Separate files from directories
    files = [e for e in entries if e.is_file]
    dirs = [e for e in entries if e.is_dir]
    # Sum file sizes and tally extensions
    total_size = sum(e.size for e in files)
    extensions = defaultdict(int)
    for f in files:
        ext = f.name.rsplit(".", 1)[-1] if "." in f.name else "no-ext"
        extensions[ext] += 1
    return {
        "total_files": len(files),  # fixed: was `len(./files)`, a syntax error
        "total_dirs": len(dirs),
        "total_size": total_size,
        "by_extension": dict(extensions)
    }
stats = await dir_stats("my-server", "/app")
print(f"Files: {stats['total_files']}")
print(f"Size: {stats['total_size'] / 1024 / 1024:.1f} MB")

Next
- Read & Write — File content operations
- AI Agent — AI automation
Last updated on