Imported from the "final" folder

This commit is contained in:
2025-11-28 12:12:50 +01:00
parent f9288986cf
commit ff8e725b35
1061 changed files with 225150 additions and 96 deletions

View File

@@ -0,0 +1,246 @@
#!/usr/bin/env python3
"""
FastLED All Source Build Script
This script implements the unified source build system for FastLED by:
1. Starting with ONE file for testing (allocator.cpp)
2. Copying original .cpp content to .cpp.hpp
3. Replacing .cpp with conditional wrapper code
4. Creating src/fastled_compile.cpp to include .cpp.hpp files
5. Adding FASTLED_ALL_SRC logic to compiler_control.h
CRITICAL: This script processes original .cpp content CORRECTLY:
- Step 1: Copy original .cpp -> .cpp.hpp (preserves source)
- Step 2: Replace .cpp with wrapper (conditional include)
"""
import os
import shutil
import glob
from pathlib import Path
from typing import List, Set
def find_cpp_files(base_dirs: List[str]) -> List[Path]:
    """Recursively collect every .cpp file under the given base directories.

    Directories that do not exist are silently skipped.  The result is
    sorted so callers always see a deterministic ordering.
    """
    collected: List[Path] = []
    for directory in base_dirs:
        if os.path.exists(directory):
            matches = glob.glob(os.path.join(directory, "**", "*.cpp"), recursive=True)
            collected.extend(map(Path, matches))
    return sorted(collected)
def get_relative_include_path(file_path: Path, from_src: bool = True) -> str:
    """Return *file_path* as a forward-slash include path.

    When *from_src* is True (the default) a leading ``src/`` component is
    stripped so the result is relative to the src/ directory, matching how
    FastLED headers are included.

    FIX: the original returned ``str(file_path)`` for the fallback cases,
    which produces backslash-separated paths on Windows — invalid inside a
    C/C++ ``#include`` directive.  We now always emit POSIX separators via
    ``as_posix()``.  Also guards against an empty path (``Path('')`` has no
    parts and indexing ``parts[0]`` raised IndexError).
    """
    parts = file_path.parts
    if from_src and parts and parts[0] == 'src':
        # Drop the leading 'src' component and join with '/' explicitly.
        return '/'.join(parts[1:])
    # No stripping requested/possible: still normalize to '/' separators.
    return file_path.as_posix()
def create_cpp_wrapper(original_cpp: Path, hpp_file: Path) -> str:
    """Build the replacement .cpp body: a conditional include of the .cpp.hpp.

    ``original_cpp`` is accepted for interface parity but is not consulted;
    only the location of *hpp_file* determines the wrapper text.
    """
    include_path = get_relative_include_path(hpp_file)
    wrapper_lines = (
        '#include "fl/compiler_control.h"',
        '#if !FASTLED_ALL_SRC',
        f'#include "{include_path}"',
        '#endif',
        '',  # trailing newline
    )
    return '\n'.join(wrapper_lines)
def update_compiler_control_h(compiler_control_path: Path) -> None:
    """Append the FASTLED_ALL_SRC default logic to compiler_control.h.

    Idempotent: if the macro is already mentioned anywhere in the file the
    function prints a notice and leaves the file untouched.
    """
    existing = compiler_control_path.read_text()
    if 'FASTLED_ALL_SRC' in existing:
        # Already patched -- keep repeated runs safe.
        print(f"FASTLED_ALL_SRC already defined in {compiler_control_path}")
        return
    logic_block = '''
// All Source Build Control
// When FASTLED_ALL_SRC is enabled, all source is compiled into a single translation unit
// Debug/testing builds use individual compilation for better error isolation
#ifndef FASTLED_ALL_SRC
#if defined(DEBUG) || defined(FASTLED_TESTING)
#define FASTLED_ALL_SRC 0
#elif !defined(RELEASE) || (RELEASE == 0)
#define FASTLED_ALL_SRC 1
#else
#define FASTLED_ALL_SRC 0
#endif
#endif
'''
    compiler_control_path.write_text(existing + logic_block)
    print(f"Updated {compiler_control_path} with FASTLED_ALL_SRC logic")
def create_fastled_compile_cpp(cpp_hpp_files: List[Path], output_path: Path) -> None:
    """Write the unified translation unit that includes every .cpp.hpp file."""
    include_lines = [
        f'#include "{get_relative_include_path(p)}"'
        for p in sorted(cpp_hpp_files)
    ]
    header = (
        '// FastLED All Source Build File\n'
        '// This file includes all .cpp.hpp files for unified compilation\n'
        '// Generated automatically by scripts/all_source_build.py\n'
        '#include "fl/compiler_control.h"\n'
        '#if FASTLED_ALL_SRC\n'
    )
    body = header + '\n'.join(include_lines) + '\n#endif // FASTLED_ALL_SRC\n'
    with open(output_path, 'w') as f:
        f.write(body)
    print(f"Created {output_path} with {len(include_lines)} includes")
def process_single_cpp_file(cpp_file: Path, dry_run: bool = False) -> Path:
    """Convert one .cpp file into the wrapper + .cpp.hpp pair.

    Order matters: the original translation unit is first preserved verbatim
    as a ``.cpp.hpp`` sibling, and only then is the ``.cpp`` overwritten with
    the conditional-include wrapper.  With ``dry_run=True`` nothing is
    written.

    Returns the path of the (created or would-be-created) .cpp.hpp file.
    """
    hpp_file = cpp_file.with_suffix('.cpp.hpp')
    print(f"Processing {cpp_file} -> {hpp_file}")
    if dry_run:
        return hpp_file
    # Preserve the original source before anything touches the .cpp file.
    print(f" Step 1: Copying original content {cpp_file} -> {hpp_file}")
    shutil.copy2(cpp_file, hpp_file)
    # Now it is safe to overwrite the .cpp with the thin wrapper.
    print(f" Step 2: Replacing {cpp_file} with wrapper code")
    with open(cpp_file, 'w') as f:
        f.write(create_cpp_wrapper(cpp_file, hpp_file))
    return hpp_file
def process_cpp_files(cpp_files: List[Path], dry_run: bool = False) -> List[Path]:
    """Run the single-file conversion over every file, preserving input order."""
    return [process_single_cpp_file(cpp_file, dry_run) for cpp_file in cpp_files]
def main():
    """CLI entry point: run the all-source-build transformation.

    Flags:
      --dry-run      print the planned actions without touching any file
      --src-dir      root source directory (default: src)
      --single-file  restrict processing to allocator.cpp as a smoke test
    """
    import argparse
    parser = argparse.ArgumentParser(description='FastLED All Source Build Script')
    parser.add_argument('--dry-run', action='store_true',
                        help='Show what would be done without making changes')
    parser.add_argument('--src-dir', default='src',
                        help='Source directory (default: src)')
    parser.add_argument('--single-file', action='store_true',
                        help='Process only allocator.cpp for testing')
    args = parser.parse_args()

    # Only these subtrees take part in the unified build.
    target_dirs = [os.path.join(args.src_dir, sub) for sub in ('fl', 'sensors', 'fx')]
    for dir_path in target_dirs:
        if not os.path.exists(dir_path):
            print(f"Warning: Directory {dir_path} does not exist")

    all_cpp_files = find_cpp_files(target_dirs)

    if args.single_file:
        # Smoke-test mode: convert only the one well-known file.
        allocator_files = [f for f in all_cpp_files if f.name == 'allocator.cpp']
        if not allocator_files:
            print("ERROR: allocator.cpp not found!")
            return
        cpp_files = allocator_files
        print(f"SINGLE FILE MODE: Processing only {cpp_files[0]}")
    else:
        cpp_files = all_cpp_files

    print(f"Found {len(cpp_files)} .cpp files:")
    for cpp_file in cpp_files:
        print(f" {cpp_file}")
    if not cpp_files:
        print("No .cpp files found!")
        return

    if args.dry_run:
        print("\n--- DRY RUN MODE ---")
        print("The following actions would be performed:")
        for cpp_file in cpp_files:
            hpp_file = cpp_file.with_suffix('.cpp.hpp')
            print(f" 1. Copy {cpp_file} -> {hpp_file} (preserve original)")
            print(f" 2. Replace {cpp_file} with conditional wrapper")
        print(f" 3. Update {args.src_dir}/fl/compiler_control.h")
        print(f" 4. Create {args.src_dir}/fastled_compile.cpp")
        return

    print(f"\nStarting all source build transformation...")
    processed_hpp_files = process_cpp_files(cpp_files, dry_run=args.dry_run)

    # Patch compiler_control.h so FASTLED_ALL_SRC gets a sensible default.
    compiler_control_path = Path(args.src_dir) / 'fl' / 'compiler_control.h'
    if compiler_control_path.exists():
        update_compiler_control_h(compiler_control_path)
    else:
        print(f"Warning: {compiler_control_path} not found")

    # Emit the unified translation unit.
    fastled_compile_path = Path(args.src_dir) / 'fastled_compile.cpp'
    create_fastled_compile_cpp(processed_hpp_files, fastled_compile_path)

    print(f"\nAll source build transformation complete!")
    print(f"Processed {len(cpp_files)} .cpp files")
    print(f"Created {len(processed_hpp_files)} .cpp.hpp files")
    if args.single_file:
        print(f"\n*** SINGLE FILE MODE COMPLETE ***")
        print(f"Test with: bash test")
        print(f"If successful, run again without --single-file for full conversion")
# Allow running this module directly as a script.
if __name__ == '__main__':
    main()

View File

@@ -0,0 +1,133 @@
#!/usr/bin/env python3
"""
Disable All Source Build System
This script temporarily disables the all source build system by:
1. Removing wrapper logic from .cpp files
2. Restoring normal compilation behavior
3. Commenting out the FASTLED_ALL_SRC logic
This is a recovery tool to get the system back to a working state.
"""
import os
import glob
from pathlib import Path
def disable_wrapper_file(cpp_file: Path) -> bool:
    """Comment out the all-source-build wrapper logic in *cpp_file*.

    Returns True when the file looked like a wrapper and was rewritten,
    False when it was left untouched or could not be processed.
    """
    # Lines containing any of these markers get a "// DISABLED:" prefix.
    wrapper_markers = ('#include "fl/compiler_control.h"',
                       '#if !FASTLED_ALL_SRC',
                       '#include',
                       '#endif')
    try:
        with open(cpp_file, 'r') as f:
            original = f.read()
        # Only touch files that actually carry the wrapper signature.
        if 'FASTLED_ALL_SRC' not in original or 'compiler_control.h' not in original:
            return False
        print(f"Disabling wrapper: {cpp_file}")
        rewritten = [
            f"// DISABLED: {line}" if any(m in line for m in wrapper_markers) else line
            for line in original.split('\n')
        ]
        result = '\n'.join(rewritten) + f'''
// TEMPORARY PLACEHOLDER - This file needs proper implementation
// All source build wrapper has been disabled to restore compilation
// TODO: Restore original {cpp_file.name} implementation
'''
        with open(cpp_file, 'w') as f:
            f.write(result)
        return True
    except Exception as e:
        # Recovery tool: report and move on rather than abort the whole run.
        print(f"Error processing {cpp_file}: {e}")
        return False
def disable_compiler_control():
    """Comment out the 'All Source Build Control' section of compiler_control.h
    and force FASTLED_ALL_SRC to 0 so individual compilation is used.

    Operates on the fixed path src/fl/compiler_control.h relative to the
    current working directory; does nothing if the macro is absent.
    """
    control_file = Path("src/fl/compiler_control.h")
    if not control_file.exists():
        print(f"Warning: {control_file} not found")
        return
    with open(control_file, 'r') as f:
        content = f.read()
    if 'FASTLED_ALL_SRC' not in content:
        return
    print(f"Disabling FASTLED_ALL_SRC logic in {control_file}")
    # Walk line by line: the section starts at its banner comment and ends at
    # the first blank line; every line inside gets a "// DISABLED:" prefix.
    rewritten = []
    inside_section = False
    for line in content.split('\n'):
        if 'All Source Build Control' in line:
            inside_section = True
        elif inside_section and not line.strip():
            inside_section = False
        rewritten.append(f"// DISABLED: {line}" if inside_section else line)
    new_content = '\n'.join(rewritten)
    new_content += '''
// TEMPORARY DISABLE - All source build system disabled for recovery
#ifndef FASTLED_ALL_SRC
#define FASTLED_ALL_SRC 0 // Always use individual compilation
#endif
'''
    with open(control_file, 'w') as f:
        f.write(new_content)
def main():
    """Disable the all-source build system across the src/ tree."""
    print("🛑 Disabling All Source Build System for Recovery")
    disabled_count = 0
    # Scan every .cpp under src/ for wrapper code.
    for filepath in glob.glob("src/**/*.cpp", recursive=True):
        cpp_file = Path(filepath)
        # The unified translation unit itself must stay untouched.
        if cpp_file.name == "fastled_compile.cpp":
            continue
        if disable_wrapper_file(cpp_file):
            disabled_count += 1
    # Disable compiler control logic
    disable_compiler_control()
    print(f"\n✅ All source build system disabled!")
    print(f" - Disabled {disabled_count} wrapper files")
    print(f" - Commented out FASTLED_ALL_SRC logic")
    print(f"\n⚠ This is a TEMPORARY recovery state")
    print(f" - System should now compile with individual .cpp files")
    print(f" - You can now implement proper all source build system")
# Allow running this module directly as a script.
if __name__ == "__main__":
    main()

View File

@@ -0,0 +1,493 @@
#!/usr/bin/env python3
"""
FastLED JavaScript Type Enhancement and Linting Strategy Script
This script provides multiple approaches for enhancing JavaScript code quality
while keeping files in .js format. Uses fast Node.js + ESLint for linting.
1. Linting Analysis - Analyze current ESLint issues and provide fixes
2. Type Safety - Add comprehensive JSDoc annotations and type checking
3. Performance - Identify and fix performance issues (await in loops, etc.)
4. Code Quality - Consistent formatting and best practices
5. Config Management - Create ESLint configuration variants
Usage:
uv run scripts/enhance-js-typing.py --approach linting
uv run scripts/enhance-js-typing.py --approach performance
uv run scripts/enhance-js-typing.py --approach types --file src/platforms/wasm/compiler/index.js
uv run scripts/enhance-js-typing.py --approach configs
uv run scripts/enhance-js-typing.py --approach summary
"""
import argparse
import re
import json
import subprocess
from pathlib import Path
from typing import List, Dict, Match, TypedDict, Union
class LintIssue(TypedDict):
    """Type definition for a linting issue"""
    rule: str      # ESLint rule id (e.g. "no-await-in-loop"); "unknown" if absent
    message: str   # human-readable description of the violation
    file: str      # path of the offending file ("" when not known)
    line: int      # 1-based line number (0 when not known)
    severity: int  # ESLint severity: 1 = warning, 2 = error
class LintResult(TypedDict, total=False):
    """Type definition for lint analysis result (all keys optional)"""
    issues: List[LintIssue]  # structured issues parsed from ESLint output
    summary: str             # short human-readable status line
    raw_output: str          # raw ESLint stdout, kept for debugging
    error: str               # "" on success, otherwise an error marker/message
class JSLintingEnhancer:
    """Comprehensive JavaScript linting and type enhancement tool.

    Discovers the .js files under src/platforms/wasm (relative to the current
    working directory, assumed to be the repo root) and offers several passes:
    ESLint analysis, await-in-loop detection, JSDoc annotation injection,
    type-definition generation, and ESLint config variant creation.
    """
    def __init__(self) -> None:
        self.workspace_root: Path = Path.cwd()
        self.wasm_dir: Path = self.workspace_root / "src" / "platforms" / "wasm"
        self.js_files: List[Path] = []
        self._discover_files()
    def _discover_files(self) -> None:
        """Discover all JavaScript files in the WASM directory"""
        if self.wasm_dir.exists():
            self.js_files = list(self.wasm_dir.rglob("*.js"))
    def analyze_current_linting(self) -> LintResult:
        """Analyze current linting issues using fast ESLint"""
        print("🔍 Analyzing current linting issues...")
        try:
            # Check if fast linting is available
            import platform
            eslint_exe = ".cache/js-tools/node_modules/.bin/eslint.cmd" if platform.system() == "Windows" else ".cache/js-tools/node_modules/.bin/eslint"
            if not Path(eslint_exe).exists():
                return LintResult(issues=[], summary="Fast linting not available. Run: uv run ci/setup-js-linting-fast.py", error="eslint_not_found")
            # Run ESLint with JSON output
            result = subprocess.run(
                [eslint_exe, "--format", "json", "--no-eslintrc", "--no-inline-config", "-c", ".cache/js-tools/.eslintrc.js",
                 "src/platforms/wasm/compiler/*.js", "src/platforms/wasm/compiler/modules/*.js"],
                capture_output=True,
                text=True,
                cwd=self.workspace_root,
                # The Windows .cmd shim needs shell execution.
                shell=platform.system() == "Windows"
            )
            if result.returncode == 0:
                print("✅ No linting issues found!")
                return LintResult(issues=[], summary="clean", raw_output="", error="")
            # Parse ESLint JSON output
            issues: List[LintIssue] = []
            if result.stdout:
                try:
                    lint_data = json.loads(result.stdout)
                    for file_result in lint_data:
                        for message in file_result.get('messages', []):
                            issues.append(LintIssue(
                                rule=message.get('ruleId', 'unknown'),
                                message=message.get('message', ''),
                                file=file_result.get('filePath', ''),
                                line=message.get('line', 0),
                                severity=message.get('severity', 1)
                            ))
                except json.JSONDecodeError:
                    # Non-JSON output (e.g. a crash banner); keep whatever was
                    # parsed so far and report via raw_output below.
                    pass
            return LintResult(
                issues=issues,
                summary=f"Found {len(issues)} linting issues",
                raw_output=result.stdout,
                error=""
            )
        except Exception as e:
            print(f"❌ Error running linter: {e}")
            return LintResult(issues=[], summary="error", raw_output="", error=str(e))
    def _parse_text_lint_output(self, output: str) -> List[LintIssue]:
        """Parse text-based lint output into structured format"""
        issues: List[LintIssue] = []
        lines = output.split('\n')
        # FIX: annotate once at initialization; the original declared the
        # annotation on a re-assignment inside the loop, shadowing the
        # untyped `current_issue = {}` above it and confusing type checkers.
        current_issue: Dict[str, Union[str, int]] = {}
        for line in lines:
            line = line.strip()
            if not line:
                continue
            # Look for rule violations of the form "(rule-name) message"
            if line.startswith('(') and ')' in line:
                rule_match = re.match(r'\(([^)]+)\)\s*(.*)', line)
                if rule_match:
                    rule_name = rule_match.group(1)
                    message = rule_match.group(2)
                    current_issue = {
                        "rule": rule_name,
                        "message": message,
                        "file": "",
                        "line": 0,
                        "severity": 1
                    }
            elif 'at /' in line:
                # Extract file and line info for the pending issue, if any
                match = re.search(r'at\s+([^:]+):(\d+)', line)
                if match and current_issue:
                    issues.append(LintIssue(
                        rule=str(current_issue["rule"]),
                        message=str(current_issue["message"]),
                        file=match.group(1),
                        line=int(match.group(2)),
                        severity=1
                    ))
                    current_issue = {}
        return issues
    def fix_await_in_loop_issues(self) -> List[Dict[str, Union[str, int]]]:
        """Identify and provide fixes for await-in-loop issues"""
        print("🔧 Analyzing await-in-loop issues...")
        fixes: List[Dict[str, Union[str, int]]] = []
        for js_file in self.js_files:
            try:
                content = js_file.read_text()
                lines = content.split('\n')
                for i, line in enumerate(lines, 1):
                    # Simple heuristic: 'await' on the same line as a loop
                    # keyword. May yield false positives; results are
                    # suggestions, not automatic edits.
                    if 'await' in line and ('for' in line or 'while' in line):
                        context = '\n'.join(lines[max(0, i-3):i+2])
                        fixes.append({
                            "file": str(js_file.relative_to(self.workspace_root)),
                            "line": i,
                            "issue": "await in loop",
                            "context": context,
                            "suggestion": "Consider using Promise.all() for parallel execution"
                        })
            except Exception as e:
                print(f"Warning: Could not analyze {js_file}: {e}")
        return fixes
    def enhance_jsdoc_types(self, file_path: str) -> str:
        """Add comprehensive JSDoc type annotations to a file"""
        target_file = Path(file_path)
        if not target_file.exists():
            return f"❌ File not found: {file_path}"
        try:
            content = target_file.read_text()
            # Add type annotations for common patterns
            enhanced_content = self._add_function_types(content)
            enhanced_content = self._add_variable_types(enhanced_content)
            enhanced_content = self._add_class_types(enhanced_content)
            # Keep the original as a .js.bak backup, then write the new file.
            backup_file = target_file.with_suffix('.js.bak')
            target_file.rename(backup_file)
            target_file.write_text(enhanced_content)
            return f"✅ Enhanced {file_path} with JSDoc types (backup: {backup_file})"
        except Exception as e:
            return f"❌ Error enhancing {file_path}: {e}"
    def _add_function_types(self, content: str) -> str:
        """Add JSDoc types to functions"""
        # Pattern for function declarations
        function_pattern = r'(function\s+(\w+)\s*\([^)]*\)\s*\{)'
        def add_jsdoc(match: Match[str]) -> str:
            func_name = match.group(2)
            return f'''/**
 * {func_name} function
 * @param {{any}} ...args - Function arguments
 * @returns {{any}} Function result
 */
{match.group(1)}'''
        return re.sub(function_pattern, add_jsdoc, content)
    def _add_variable_types(self, content: str) -> str:
        """Add JSDoc types to variables"""
        # Pattern for const/let declarations
        var_pattern = r'(const|let)\s+(\w+)\s*='
        def add_type_comment(match: Match[str]) -> str:
            return f'{match.group(0)} /** @type {{any}} */'
        return re.sub(var_pattern, add_type_comment, content)
    def _add_class_types(self, content: str) -> str:
        """Add JSDoc types to classes"""
        # Pattern for class declarations
        class_pattern = r'(class\s+(\w+))'
        def add_class_jsdoc(match: Match[str]) -> str:
            class_name = match.group(2)
            return f'''/**
 * {class_name} class
 * @class
 */
{match.group(1)}'''
        return re.sub(class_pattern, add_class_jsdoc, content)
    def generate_type_definitions(self) -> str:
        """Generate enhanced type definitions"""
        print("📝 Generating enhanced type definitions...")
        types_content = '''/**
* Enhanced FastLED WASM Type Definitions
* Auto-generated comprehensive types for improved type safety
*/
// Browser Environment Extensions
declare global {
interface Window {
audioData?: {
audioBuffers: Record<string, AudioBufferStorage>;
hasActiveSamples: boolean;
};
// FastLED global functions
FastLED_onFrame?: (frameData: any, callback: Function) => void;
FastLED_onStripUpdate?: (data: any) => void;
FastLED_onStripAdded?: (id: number, length: number) => void;
FastLED_onUiElementsAdded?: (data: any) => void;
}
// Audio API types
interface AudioWorkletProcessor {
process(inputs: Float32Array[][], outputs: Float32Array[][], parameters: Record<string, Float32Array>): boolean;
}
// WebAssembly Module types
interface WASMModule {
_malloc(size: number): number;
_free(ptr: number): void;
_extern_setup?: () => void;
_extern_loop?: () => void;
ccall(name: string, returnType: string, argTypes: string[], args: any[]): any;
}
}
// FastLED specific interfaces
interface FastLEDConfig {
canvasId: string;
uiControlsId: string;
outputId: string;
frameRate?: number;
}
interface StripData {
strip_id: number;
pixel_data: Uint8Array;
length: number;
diameter?: number;
}
interface ScreenMapData {
strips: Record<string, Array<{ x: number; y: number }>>;
absMin: [number, number];
absMax: [number, number];
}
export {};
'''
        types_file = self.workspace_root / "src" / "platforms" / "wasm" / "types.enhanced.d.ts"
        types_file.write_text(types_content)
        return f"✅ Generated enhanced types: {types_file}"
    def create_linting_config_variants(self) -> List[str]:
        """Create different ESLint configuration variants and return success messages"""
        print("⚙️ Creating ESLint configuration variants...")
        configs: List[str] = []
        # Strict ESLint config
        strict_config = '''module.exports = {
env: {
browser: true,
es2022: true,
worker: true
},
parserOptions: {
ecmaVersion: 2022,
sourceType: "module"
},
rules: {
// Critical issues
"no-debugger": "error",
"no-eval": "error",
// Code quality
"eqeqeq": "error",
"prefer-const": "error",
"no-var": "error",
"no-await-in-loop": "error",
"guard-for-in": "error",
"camelcase": "warn",
"default-param-last": "warn"
}
};'''
        # Minimal ESLint config
        minimal_config = '''module.exports = {
env: {
browser: true,
es2022: true,
worker: true
},
parserOptions: {
ecmaVersion: 2022,
sourceType: "module"
},
rules: {
// Only critical runtime issues
"no-debugger": "error",
"no-eval": "error"
}
};'''
        # Write config variants
        strict_file = self.workspace_root / ".cache/js-tools" / ".eslintrc.strict.js"
        minimal_file = self.workspace_root / ".cache/js-tools" / ".eslintrc.minimal.js"
        # Ensure .cache/js-tools directory exists
        (self.workspace_root / ".cache/js-tools").mkdir(parents=True, exist_ok=True)
        strict_file.write_text(strict_config)
        minimal_file.write_text(minimal_config)
        configs.extend([
            f"✅ Created strict ESLint config: {strict_file}",
            f"✅ Created minimal ESLint config: {minimal_file}"
        ])
        return configs
    def run_summary(self) -> str:
        """Generate comprehensive summary of JavaScript codebase and return formatted report"""
        print("📊 Generating comprehensive JavaScript codebase summary...")
        summary: List[str] = []
        summary.append("=" * 80)
        summary.append("FASTLED JAVASCRIPT LINTING & TYPE SAFETY SUMMARY")
        summary.append("=" * 80)
        # File statistics
        total_lines = 0
        total_files = len(self.js_files)
        for js_file in self.js_files:
            try:
                lines = len(js_file.read_text().split('\n'))
                total_lines += lines
            except (OSError, UnicodeDecodeError):
                # FIX: was a bare `except:` which also swallowed
                # KeyboardInterrupt/SystemExit; only skip files that cannot
                # be read or decoded.
                pass
        summary.append(f"\n📁 CODEBASE OVERVIEW:")
        summary.append(f" • Total JavaScript files: {total_files}")
        summary.append(f" • Total lines of code: {total_lines:,}")
        summary.append(f" • Average file size: {total_lines // max(total_files, 1):,} lines")
        # Current linting status
        lint_result = self.analyze_current_linting()
        summary.append(f"\n🔍 CURRENT LINTING STATUS:")
        summary.append(f"{lint_result.get('summary', 'No summary available')}")
        issues = lint_result.get('issues', [])
        if issues:
            rule_counts: Dict[str, int] = {}
            for issue in issues:
                rule = issue.get('rule', 'unknown')
                rule_counts[rule] = rule_counts.get(rule, 0) + 1
            summary.append(f" • Most common issues:")
            # Sort by count
            for rule, count in sorted(rule_counts.items(), key=lambda x: x[1], reverse=True)[:5]:
                summary.append(f" - {rule}: {count} occurrences")
        # Recommendations
        summary.append(f"\n🎯 RECOMMENDED NEXT STEPS:")
        summary.append(f" 1. Fix critical performance issues (await-in-loop)")
        summary.append(f" 2. Gradually enable stricter type checking per file")
        summary.append(f" 3. Add comprehensive JSDoc annotations")
        summary.append(f" 4. Enable additional linting rules incrementally")
        summary.append(f" 5. Create automated type checking CI pipeline")
        # Available tools
        summary.append(f"\n🛠️ AVAILABLE ENHANCEMENT TOOLS:")
        summary.append(f" • uv run scripts/enhance-js-typing.py --approach linting")
        summary.append(f" • uv run scripts/enhance-js-typing.py --approach performance")
        summary.append(f" • uv run scripts/enhance-js-typing.py --approach types --file <path>")
        summary.append(f" • uv run scripts/enhance-js-typing.py --approach configs")
        summary.append(f" • bash .cache/js-tools/lint-js-fast # Fast ESLint linting")
        summary.append(f" • bash lint # Full project linting")
        summary.append("\n" + "=" * 80)
        return "\n".join(summary)
def main():
    """CLI entry point: dispatch to the selected enhancement approach."""
    parser = argparse.ArgumentParser(description="FastLED JavaScript Enhancement Tool")
    parser.add_argument("--approach",
                        choices=["summary", "linting", "performance", "types", "configs"],
                        default="summary", help="Enhancement approach")
    parser.add_argument("--file", help="Specific file to enhance (for types approach)")
    args = parser.parse_args()

    enhancer = JSLintingEnhancer()
    approach = args.approach

    if approach == "summary":
        print(enhancer.run_summary())
        return

    if approach == "linting":
        result = enhancer.analyze_current_linting()
        print(f"\n🔍 LINTING ANALYSIS:")
        print(f" {result.get('summary', 'No summary available')}")
        issues = result.get('issues', [])
        if issues:
            print(f"\n📋 ISSUES BREAKDOWN:")
            for issue in issues[:10]:  # Show first 10
                print(f"{issue.get('rule', 'unknown')}: {issue.get('message', 'No message')}")
                if issue.get('file'):
                    print(f" File: {issue.get('file', '')}:{issue.get('line', '?')}")
        return

    if approach == "performance":
        fixes = enhancer.fix_await_in_loop_issues()
        print(f"\n⚡ PERFORMANCE ANALYSIS:")
        print(f" Found {len(fixes)} potential await-in-loop issues")
        for fix in fixes[:5]:  # Show first 5
            print(f"\n 📁 {fix['file']}:{fix['line']}")
            print(f" Issue: {fix['issue']}")
            print(f" Suggestion: {fix['suggestion']}")
        return

    if approach == "types":
        if not args.file:
            print("❌ --file argument required for types approach")
            return
        print(enhancer.enhance_jsdoc_types(args.file))
        # Also generate enhanced type definitions
        print(enhancer.generate_type_definitions())
        return

    if approach == "configs":
        for config in enhancer.create_linting_config_variants():
            print(config)
# Allow running this module directly as a script.
if __name__ == "__main__":
    main()