refactor(indentation): Apply codebase-wide 1-space ultra-compact refactor. Formatted 21 core modules and tests.
This commit is contained in:
308
mcp_client.py
308
mcp_client.py
@@ -520,175 +520,167 @@ def get_git_diff(path: str, base_rev: str = "HEAD", head_rev: str = "") -> str:
|
||||
return f"ERROR running git diff: {e.stderr}"
|
||||
except Exception as e:
|
||||
return f"ERROR: {e}"
|
||||
|
||||
|
||||
def py_find_usages(path: str, name: str) -> str:
    """Finds exact string matches of a symbol in a given file or directory.

    Args:
        path: File or directory to search (validated by _resolve_and_check).
        name: Symbol to look for; matched as a whole word via \\b anchors.

    Returns:
        "file:line: snippet" matches (capped at 100), a "No usages" message,
        or an "ERROR ..." string — this tool never raises to the caller.
    """
    p, err = _resolve_and_check(path)
    if err: return err
    try:
        import re
        # Whole-word match so e.g. searching "run" does not hit "runner".
        pattern = re.compile(r"\b" + re.escape(name) + r"\b")
        results = []

        def _search_file(fp):
            # Conversation-history files are noise; skip them and any path
            # outside the sandbox allow-list.
            if fp.name == "history.toml" or fp.name.endswith("_history.toml"): return
            if not _is_allowed(fp): return
            try:
                text = fp.read_text(encoding="utf-8")
                lines = text.splitlines()
                for i, line in enumerate(lines, 1):
                    if pattern.search(line):
                        rel = fp.relative_to(_primary_base_dir if _primary_base_dir else Path.cwd())
                        # Truncate long lines so the report stays readable.
                        results.append(f"{rel}:{i}: {line.strip()[:100]}")
            except Exception:
                # Best-effort: unreadable/binary files are silently skipped.
                pass

        if p.is_file():
            _search_file(p)
        else:
            for root, dirs, files in os.walk(p):
                # Prune hidden dirs and common virtualenv/cache dirs in place.
                dirs[:] = [d for d in dirs if not d.startswith('.') and d not in ('__pycache__', 'venv', 'env')]
                for file in files:
                    if file.endswith(('.py', '.md', '.toml', '.txt', '.json')):
                        _search_file(Path(root) / file)

        if not results:
            return f"No usages found for '{name}' in {p}"
        if len(results) > 100:
            return "\n".join(results[:100]) + f"\n... (and {len(results)-100} more)"
        return "\n".join(results)
    except Exception as e:
        return f"ERROR finding usages for '{name}': {e}"
|
||||
|
||||
def py_get_imports(path: str) -> str:
    """Parses a file's AST and returns a strict list of its dependencies.

    Args:
        path: Path to a Python source file.

    Returns:
        A bulleted "Imports:" listing, "No imports found.", or an
        "ERROR ..." string — this tool never raises to the caller.
    """
    p, err = _resolve_and_check(path)
    if err: return err
    if not p.is_file() or p.suffix != ".py": return f"ERROR: not a python file: {path}"
    try:
        import ast
        code = p.read_text(encoding="utf-8")
        tree = ast.parse(code)
        imports = []
        # Only top-level statements are scanned (tree.body, not ast.walk):
        # imports nested inside functions or conditionals are ignored.
        for node in tree.body:
            if isinstance(node, ast.Import):
                for alias in node.names:
                    imports.append(alias.name)
            elif isinstance(node, ast.ImportFrom):
                # node.module is None for relative imports like "from . import x".
                module = node.module or ""
                for alias in node.names:
                    imports.append(f"{module}.{alias.name}" if module else alias.name)
        if not imports: return "No imports found."
        return "Imports:\n" + "\n".join(f" - {i}" for i in imports)
    except Exception as e:
        return f"ERROR getting imports for '{path}': {e}"
|
||||
|
||||
def py_check_syntax(path: str) -> str:
    """Runs a quick syntax check on a Python file.

    Args:
        path: Path to a Python source file.

    Returns:
        "Syntax OK: ..." on success, a detailed SyntaxError report with
        line/offset/source text, or an "ERROR ..." string — never raises.
    """
    p, err = _resolve_and_check(path)
    if err: return err
    if not p.is_file() or p.suffix != ".py": return f"ERROR: not a python file: {path}"
    try:
        import ast
        code = p.read_text(encoding="utf-8")
        # ast.parse is a full compile-to-AST pass, so it catches everything
        # a plain byte-compile would, without executing the file.
        ast.parse(code)
        return f"Syntax OK: {path}"
    except SyntaxError as e:
        # Surface the exact location plus the offending source line.
        return f"SyntaxError in {path} at line {e.lineno}, offset {e.offset}: {e.msg}\n{e.text}"
    except Exception as e:
        return f"ERROR checking syntax for '{path}': {e}"
|
||||
|
||||
def py_get_hierarchy(path: str, class_name: str) -> str:
    """Scans the project to find subclasses of a given class.

    Args:
        path: File or directory to scan.
        class_name: Base-class name to match (by simple or attribute name).

    Returns:
        A bulleted list of subclasses, a "No subclasses" message, or an
        "ERROR ..." string — this tool never raises to the caller.
    """
    p, err = _resolve_and_check(path)
    if err: return err
    import ast
    subclasses = []

    def _search_file(fp):
        if not _is_allowed(fp): return
        try:
            code = fp.read_text(encoding="utf-8")
            tree = ast.parse(code)
            for node in ast.walk(tree):
                if isinstance(node, ast.ClassDef):
                    for base in node.bases:
                        if isinstance(base, ast.Name) and base.id == class_name:
                            subclasses.append(f"{fp.name}: class {node.name}({class_name})")
                        elif isinstance(base, ast.Attribute) and base.attr == class_name:
                            # BUGFIX: base.value is only a Name for "mod.Class";
                            # for dotted bases like "pkg.mod.Class" it is another
                            # Attribute with no .id, and the old base.value.id
                            # raised AttributeError — silently discarding the
                            # whole file via the blanket except below.
                            qualifier = getattr(base.value, "id", "?")
                            subclasses.append(f"{fp.name}: class {node.name}({qualifier}.{class_name})")
        except Exception:
            # Best-effort: unparseable files are skipped.
            pass

    try:
        if p.is_file():
            _search_file(p)
        else:
            for root, dirs, files in os.walk(p):
                # Prune hidden dirs and common virtualenv/cache dirs in place.
                dirs[:] = [d for d in dirs if not d.startswith('.') and d not in ('__pycache__', 'venv', 'env')]
                for file in files:
                    if file.endswith('.py'):
                        _search_file(Path(root) / file)

        if not subclasses:
            return f"No subclasses of '{class_name}' found in {p}"
        return f"Subclasses of '{class_name}':\n" + "\n".join(f" - {s}" for s in subclasses)
    except Exception as e:
        return f"ERROR finding subclasses of '{class_name}': {e}"
|
||||
|
||||
def py_get_docstring(path: str, name: str) -> str:
    """Extracts the docstring for a specific module, class, or function.

    Args:
        path: Path to a Python source file.
        name: Symbol whose docstring to fetch; empty or "module" selects
            the module-level docstring.

    Returns:
        The docstring text, a "No docstring" message, or an "ERROR ..."
        string — this tool never raises to the caller.
    """
    p, err = _resolve_and_check(path)
    if err: return err
    if not p.is_file() or p.suffix != ".py": return f"ERROR: not a python file: {path}"
    try:
        import ast
        code = p.read_text(encoding="utf-8")
        tree = ast.parse(code)
        if not name or name == "module":
            doc = ast.get_docstring(tree)
            return doc if doc else "No module docstring found."

        # _get_symbol_node resolves the named class/function in the tree.
        node = _get_symbol_node(tree, name)
        if not node: return f"ERROR: could not find symbol '{name}' in {path}"
        doc = ast.get_docstring(node)
        return doc if doc else f"No docstring found for '{name}'."
    except Exception as e:
        return f"ERROR getting docstring for '{name}': {e}"
|
||||
|
||||
def get_tree(path: str, max_depth: int = 2) -> str:
    """Returns a directory structure up to a max depth.

    Args:
        path: Directory to render.
        max_depth: How many levels deep to descend (coerced via int()).

    Returns:
        A unicode box-drawing tree rooted at the directory name, or an
        "ERROR ..." string — this tool never raises to the caller.
    """
    p, err = _resolve_and_check(path)
    if err: return err
    if not p.is_dir(): return f"ERROR: not a directory: {path}"

    try:
        max_depth = int(max_depth)

        # Kept nested on purpose: _build_tree closes over max_depth. A
        # module-level copy would reference max_depth out of scope and
        # raise NameError at runtime.
        def _build_tree(dir_path, current_depth, prefix=""):
            if current_depth > max_depth: return []
            lines = []
            try:
                # Directories first, then files, each group case-insensitive.
                entries = sorted(dir_path.iterdir(), key=lambda e: (e.is_file(), e.name.lower()))
            except PermissionError:
                return []

            # Filter hidden entries, virtualenv/cache dirs, and history files.
            entries = [e for e in entries if not e.name.startswith('.') and e.name not in ('__pycache__', 'venv', 'env') and e.name != "history.toml" and not e.name.endswith("_history.toml")]

            for i, entry in enumerate(entries):
                is_last = (i == len(entries) - 1)
                connector = "└── " if is_last else "├── "
                lines.append(f"{prefix}{connector}{entry.name}")
                if entry.is_dir():
                    # Continue the vertical guide unless this branch is closed.
                    extension = "    " if is_last else "│   "
                    lines.extend(_build_tree(entry, current_depth + 1, prefix + extension))
            return lines

        tree_lines = [f"{p.name}/"] + _build_tree(p, 1)
        return "\n".join(tree_lines)
    except Exception as e:
        return f"ERROR generating tree for '{path}': {e}"
|
||||
# ------------------------------------------------------------------ web tools
|
||||
|
||||
class _DDGParser(HTMLParser):
|
||||
def __init__(self) -> None:
|
||||
@@ -1306,4 +1298,4 @@ MCP_TOOL_SPECS: list[dict[str, Any]] = [
|
||||
"required": ["path"]
|
||||
}
|
||||
}
|
||||
]
|
||||
]
|
||||
|
||||
Reference in New Issue
Block a user