diff --git a/extensions/fine_python_ast/fine_python_ast/ast_provider.py b/extensions/fine_python_ast/fine_python_ast/ast_provider.py index 2bf73b0..6f4e181 100644 --- a/extensions/fine_python_ast/fine_python_ast/ast_provider.py +++ b/extensions/fine_python_ast/fine_python_ast/ast_provider.py @@ -3,20 +3,23 @@ from fine_python_ast import iast_provider -from finecode_extension_api.interfaces import icache, ifilemanager, ilogger +from finecode_extension_api.interfaces import icache, ifileeditor, ilogger class PythonSingleAstProvider(iast_provider.IPythonSingleAstProvider): CACHE_KEY = "PythonSingleAstProvider" + FILE_OPERATION_AUTHOR = ifileeditor.FileOperationAuthor( + id="PythonSingleAstProvider" + ) def __init__( self, - file_manager: ifilemanager.IFileManager, + file_editor: ifileeditor.IFileEditor, cache: icache.ICache, logger: ilogger.ILogger, ): self.cache = cache - self.file_manager = file_manager + self.file_editor = file_editor self.logger = logger async def get_file_ast(self, file_path: Path) -> ast.Module: @@ -30,8 +33,12 @@ async def get_file_ast(self, file_path: Path) -> ast.Module: except icache.CacheMissException: ... 
- file_content: str = await self.file_manager.get_content(file_path) - file_version: str = await self.file_manager.get_file_version(file_path) + async with self.file_editor.session( + author=self.FILE_OPERATION_AUTHOR + ) as session: + async with session.read_file(file_path=file_path) as file_info: + file_content: str = file_info.content + file_version: str = file_info.version try: ast_instance = ast.parse(file_content) diff --git a/extensions/fine_python_ast/pyproject.toml b/extensions/fine_python_ast/pyproject.toml index de61a52..553b7f1 100644 --- a/extensions/fine_python_ast/pyproject.toml +++ b/extensions/fine_python_ast/pyproject.toml @@ -18,3 +18,7 @@ finecode_dev_common_preset = { path = "../../finecode_dev_common_preset", editab finecode = { path = "../../", editable = true } finecode_extension_runner = { path = "../../finecode_extension_runner", editable = true } finecode_extension_api = { path = "../../finecode_extension_api", editable = true } +finecode_jsonrpc = { path = "../../finecode_jsonrpc", editable = true } +finecode_builtin_handlers = { path = "../../finecode_builtin_handlers", editable = true } +fine_python_lint = { path = "../../presets/fine_python_lint", editable = true } +fine_python_format = { path = "../../presets/fine_python_format", editable = true } diff --git a/extensions/fine_python_black/pyproject.toml b/extensions/fine_python_black/pyproject.toml index fca8767..3664e6d 100644 --- a/extensions/fine_python_black/pyproject.toml +++ b/extensions/fine_python_black/pyproject.toml @@ -18,3 +18,7 @@ finecode_dev_common_preset = { path = "../../finecode_dev_common_preset", editab finecode = { path = "../../", editable = true } finecode_extension_runner = { path = "../../finecode_extension_runner", editable = true } finecode_extension_api = { path = "../../finecode_extension_api", editable = true } +finecode_jsonrpc = { path = "../../finecode_jsonrpc", editable = true } +finecode_builtin_handlers = { path = "../../finecode_builtin_handlers", 
editable = true } +fine_python_lint = { path = "../../presets/fine_python_lint", editable = true } +fine_python_format = { path = "../../presets/fine_python_format", editable = true } diff --git a/extensions/fine_python_flake8/fine_python_flake8/__init__.py b/extensions/fine_python_flake8/fine_python_flake8/__init__.py index 67c886c..27f2788 100644 --- a/extensions/fine_python_flake8/fine_python_flake8/__init__.py +++ b/extensions/fine_python_flake8/fine_python_flake8/__init__.py @@ -1,6 +1,6 @@ -from .action import Flake8LintHandler, Flake8LintHandlerConfig +from .action import Flake8LintFilesHandler, Flake8LintFilesHandlerConfig __all__ = [ - "Flake8LintHandler", - "Flake8LintHandlerConfig", + "Flake8LintFilesHandler", + "Flake8LintFilesHandlerConfig", ] diff --git a/extensions/fine_python_flake8/fine_python_flake8/action.py b/extensions/fine_python_flake8/fine_python_flake8/action.py index f55f8e6..e9c37f3 100644 --- a/extensions/fine_python_flake8/fine_python_flake8/action.py +++ b/extensions/fine_python_flake8/fine_python_flake8/action.py @@ -12,21 +12,21 @@ from flake8.plugins import finder from finecode_extension_api import code_action -from finecode_extension_api.actions import lint as lint_action +from finecode_extension_api.actions import lint_files as lint_files_action from finecode_extension_api.interfaces import ( icache, - ifilemanager, + ifileeditor, ilogger, iprocessexecutor, ) -def map_flake8_check_result_to_lint_message(result: tuple) -> lint_action.LintMessage: +def map_flake8_check_result_to_lint_message(result: tuple) -> lint_files_action.LintMessage: error_code, line_number, column, text, physical_line = result - return lint_action.LintMessage( - range=lint_action.Range( - start=lint_action.Position(line=line_number, character=column), - end=lint_action.Position( + return lint_files_action.LintMessage( + range=lint_files_action.Range( + start=lint_files_action.Position(line=line_number, character=column), + end=lint_files_action.Position( 
line=line_number, character=len(physical_line) if physical_line is not None else column, ), @@ -35,9 +35,9 @@ def map_flake8_check_result_to_lint_message(result: tuple) -> lint_action.LintMe code=error_code, source="flake8", severity=( - lint_action.LintMessageSeverity.WARNING + lint_files_action.LintMessageSeverity.WARNING if error_code.startswith("W") - else lint_action.LintMessageSeverity.ERROR + else lint_files_action.LintMessageSeverity.ERROR ), ) @@ -46,9 +46,9 @@ def run_flake8_on_single_file( file_path: Path, file_content: str, file_ast: ast.Module, - config: Flake8LintHandlerConfig, -) -> list[lint_action.LintMessage]: - lint_messages: list[lint_action.LintMessage] = [] + config: Flake8LintFilesHandlerConfig, +) -> list[lint_files_action.LintMessage]: + lint_messages: list[lint_files_action.LintMessage] = [] # flake8 expects lines with newline at the end file_lines = [line + "\n" for line in file_content.split("\n")] # TODO: investigate whether guide and decider can be reused. They cannot be @@ -109,31 +109,34 @@ def run_flake8_on_single_file( @dataclasses.dataclass -class Flake8LintHandlerConfig(code_action.ActionHandlerConfig): +class Flake8LintFilesHandlerConfig(code_action.ActionHandlerConfig): max_line_length: int = 79 select: list[str] | None = None extend_select: list[str] | None = None extend_ignore: list[str] | None = None -class Flake8LintHandler( - code_action.ActionHandler[lint_action.LintAction, Flake8LintHandlerConfig] +class Flake8LintFilesHandler( + code_action.ActionHandler[lint_files_action.LintFilesAction, Flake8LintFilesHandlerConfig] ): CACHE_KEY = "flake8" + FILE_OPERATION_AUTHOR = ifileeditor.FileOperationAuthor( + id="Flake8LintFilesHandler" + ) def __init__( self, - config: Flake8LintHandlerConfig, + config: Flake8LintFilesHandlerConfig, cache: icache.ICache, logger: ilogger.ILogger, - file_manager: ifilemanager.IFileManager, + file_editor: ifileeditor.IFileEditor, ast_provider: iast_provider.IPythonSingleAstProvider, 
process_executor: iprocessexecutor.IProcessExecutor, ) -> None: self.config = config self.cache = cache self.logger = logger - self.file_manager = file_manager + self.file_editor = file_editor self.ast_provider = ast_provider self.process_executor = process_executor @@ -145,19 +148,24 @@ def __init__( async def run_on_single_file( self, file_path: Path - ) -> lint_action.LintRunResult | None: + ) -> lint_files_action.LintFilesRunResult | None: messages = {} try: cached_lint_messages = await self.cache.get_file_cache( file_path, self.CACHE_KEY ) messages[str(file_path)] = cached_lint_messages - return lint_action.LintRunResult(messages=messages) + return lint_files_action.LintFilesRunResult(messages=messages) except icache.CacheMissException: pass - file_content = await self.file_manager.get_content(file_path) - file_version = await self.file_manager.get_file_version(file_path) + async with self.file_editor.session( + author=self.FILE_OPERATION_AUTHOR + ) as session: + async with session.read_file(file_path=file_path) as file_info: + file_content: str = file_info.content + file_version: str = file_info.version + try: file_ast = await self.ast_provider.get_file_ast(file_path=file_path) except SyntaxError: @@ -175,11 +183,11 @@ async def run_on_single_file( file_path, file_version, self.CACHE_KEY, lint_messages ) - return lint_action.LintRunResult(messages=messages) + return lint_files_action.LintFilesRunResult(messages=messages) async def run( self, - payload: lint_action.LintRunPayload, + payload: lint_files_action.LintFilesRunPayload, run_context: code_action.RunActionWithPartialResultsContext, ) -> None: if self.config.select is not None and len(self.config.select) == 0: diff --git a/extensions/fine_python_import_linter/pyproject.toml b/extensions/fine_python_import_linter/pyproject.toml index cbdec0c..4e1b8a6 100644 --- a/extensions/fine_python_import_linter/pyproject.toml +++ b/extensions/fine_python_import_linter/pyproject.toml @@ -23,3 +23,7 @@ 
finecode_dev_common_preset = { path = "../../finecode_dev_common_preset", editab finecode = { path = "../../", editable = true } finecode_extension_runner = { path = "../../finecode_extension_runner", editable = true } finecode_extension_api = { path = "../../finecode_extension_api", editable = true } +finecode_jsonrpc = { path = "../../finecode_jsonrpc", editable = true } +finecode_builtin_handlers = { path = "../../finecode_builtin_handlers", editable = true } +fine_python_lint = { path = "../../presets/fine_python_lint", editable = true } +fine_python_format = { path = "../../presets/fine_python_format", editable = true } diff --git a/extensions/fine_python_isort/pyproject.toml b/extensions/fine_python_isort/pyproject.toml index 8392d6f..7dd3434 100644 --- a/extensions/fine_python_isort/pyproject.toml +++ b/extensions/fine_python_isort/pyproject.toml @@ -18,3 +18,7 @@ finecode_dev_common_preset = { path = "../../finecode_dev_common_preset", editab finecode = { path = "../../", editable = true } finecode_extension_runner = { path = "../../finecode_extension_runner", editable = true } finecode_extension_api = { path = "../../finecode_extension_api", editable = true } +finecode_jsonrpc = { path = "../../finecode_jsonrpc", editable = true } +finecode_builtin_handlers = { path = "../../finecode_builtin_handlers", editable = true } +fine_python_lint = { path = "../../presets/fine_python_lint", editable = true } +fine_python_format = { path = "../../presets/fine_python_format", editable = true } diff --git a/extensions/fine_python_module_exports/pyproject.toml b/extensions/fine_python_module_exports/pyproject.toml index e62bb0f..8d6e99a 100644 --- a/extensions/fine_python_module_exports/pyproject.toml +++ b/extensions/fine_python_module_exports/pyproject.toml @@ -18,3 +18,7 @@ finecode_dev_common_preset = { path = "../../finecode_dev_common_preset", editab finecode = { path = "../../", editable = true } finecode_extension_runner = { path = 
"../../finecode_extension_runner", editable = true } finecode_extension_api = { path = "../../finecode_extension_api", editable = true } +finecode_jsonrpc = { path = "../../finecode_jsonrpc", editable = true } +finecode_builtin_handlers = { path = "../../finecode_builtin_handlers", editable = true } +fine_python_lint = { path = "../../presets/fine_python_lint", editable = true } +fine_python_format = { path = "../../presets/fine_python_format", editable = true } diff --git a/extensions/fine_python_mypy/fine_python_mypy/action.py b/extensions/fine_python_mypy/fine_python_mypy/action.py index 7723671..6f01a1a 100644 --- a/extensions/fine_python_mypy/fine_python_mypy/action.py +++ b/extensions/fine_python_mypy/fine_python_mypy/action.py @@ -13,7 +13,7 @@ from finecode_extension_api.interfaces import ( icache, icommandrunner, - ifilemanager, + ifileeditor, ilogger, iextensionrunnerinfoprovider, iprojectinfoprovider, @@ -31,6 +31,7 @@ class MypyLintHandler( code_action.ActionHandler[lint_action.LintAction, MypyManyCodeActionConfig] ): CACHE_KEY = "mypy" + FILE_OPERATION_AUTHOR = ifileeditor.FileOperationAuthor(id="MypyLintHandler") DMYPY_ARGS = [ "--no-color-output", @@ -50,7 +51,7 @@ def __init__( project_info_provider: iprojectinfoprovider.IProjectInfoProvider, cache: icache.ICache, logger: ilogger.ILogger, - file_manager: ifilemanager.IFileManager, + file_editor: ifileeditor.IFileEditor, lifecycle: code_action.ActionHandlerLifecycle, command_runner: icommandrunner.ICommandRunner, ) -> None: @@ -58,7 +59,7 @@ def __init__( self.project_info_provider = project_info_provider self.cache = cache self.logger = logger - self.file_manager = file_manager + self.file_editor = file_editor self.command_runner = command_runner lifecycle.on_shutdown(self.shutdown) @@ -116,9 +117,12 @@ async def run_on_single_file( files_versions: dict[Path, str] = {} # can we exclude cached files here?
Using the right cache(one that handles # dependencies as well) should be possible - for file_path in all_project_files: - file_version = await self.file_manager.get_file_version(file_path) - files_versions[file_path] = file_version + async with self.file_editor.session( + author=self.FILE_OPERATION_AUTHOR + ) as session: + for file_path in all_project_files: + file_version = await session.read_file_version(file_path) + files_versions[file_path] = file_version try: all_processed_files_with_messages = await self._run_dmypy_on_project( @@ -132,22 +136,25 @@ async def run_on_single_file( ) in all_processed_files_with_messages.items() } - for ( - file_path, - lint_messages, - ) in all_processed_files_with_messages.items(): - try: - file_version = files_versions[file_path] - except KeyError: - # mypy can resolve dependencies which are not in `files_to_lint` - # and as result also not in `files_versions` - file_version = await self.file_manager.get_file_version( - file_path + async with self.file_editor.session( + author=self.FILE_OPERATION_AUTHOR + ) as session: + for ( + file_path, + lint_messages, + ) in all_processed_files_with_messages.items(): + try: + file_version = files_versions[file_path] + except KeyError: + # mypy can resolve dependencies which are not in `files_to_lint` + # and as result also not in `files_versions` + file_version = await session.read_file_version( + file_path + ) + + await self.cache.save_file_cache( + file_path, file_version, self.CACHE_KEY, lint_messages ) - - await self.cache.save_file_cache( - file_path, file_version, self.CACHE_KEY, lint_messages - ) finally: project_checked_event.set() del self._projects_being_checked_done_events[project_path] diff --git a/extensions/fine_python_mypy/fine_python_mypy/ast_provider.py b/extensions/fine_python_mypy/fine_python_mypy/ast_provider.py index d81fcfc..f5e8e5a 100644 --- a/extensions/fine_python_mypy/fine_python_mypy/ast_provider.py +++ 
b/extensions/fine_python_mypy/fine_python_mypy/ast_provider.py @@ -7,20 +7,23 @@ import mypy.options as mypy_options from fine_python_mypy import iast_provider -from finecode_extension_api.interfaces import icache, ifilemanager, ilogger +from finecode_extension_api.interfaces import icache, ifileeditor, ilogger class MypySingleAstProvider(iast_provider.IMypySingleAstProvider): CACHE_KEY = "MypySingleAstProvider" + FILE_OPERATION_AUTHOR = ifileeditor.FileOperationAuthor( + id="MypySingleAstProvider" + ) def __init__( self, - file_manager: ifilemanager.IFileManager, + file_editor: ifileeditor.IFileEditor, cache: icache.ICache, logger: ilogger.ILogger, ): self.cache = cache - self.file_manager = file_manager + self.file_editor = file_editor self.logger = logger async def get_file_ast(self, file_path: Path) -> mypy_nodes.MypyFile: @@ -34,8 +37,13 @@ async def get_file_ast(self, file_path: Path) -> mypy_nodes.MypyFile: except icache.CacheMissException: ... - file_text: str = await self.file_manager.get_content(file_path) - file_version: str = await self.file_manager.get_file_version(file_path) + async with self.file_editor.session( + author=self.FILE_OPERATION_AUTHOR + ) as session: + async with session.read_file(file_path=file_path) as file_info: + file_text: str = file_info.content + file_version: str = file_info.version + base_dir = self.get_file_package_parent_dir_path(file_path) module_program_path = self.get_file_program_path( file_path=file_path, root_package_parent_dir_path=base_dir diff --git a/extensions/fine_python_mypy/pyproject.toml b/extensions/fine_python_mypy/pyproject.toml index 2f2e6ac..cce9b2f 100644 --- a/extensions/fine_python_mypy/pyproject.toml +++ b/extensions/fine_python_mypy/pyproject.toml @@ -18,3 +18,7 @@ finecode_dev_common_preset = { path = "../../finecode_dev_common_preset", editab finecode = { path = "../../", editable = true } finecode_extension_runner = { path = "../../finecode_extension_runner", editable = true } finecode_extension_api 
= { path = "../../finecode_extension_api", editable = true } +finecode_jsonrpc = { path = "../../finecode_jsonrpc", editable = true } +finecode_builtin_handlers = { path = "../../finecode_builtin_handlers", editable = true } +fine_python_lint = { path = "../../presets/fine_python_lint", editable = true } +fine_python_format = { path = "../../presets/fine_python_format", editable = true } diff --git a/extensions/fine_python_package_info/fine_python_package_info/__init__.py b/extensions/fine_python_package_info/fine_python_package_info/__init__.py index 5c467d2..85034ba 100644 --- a/extensions/fine_python_package_info/fine_python_package_info/__init__.py +++ b/extensions/fine_python_package_info/fine_python_package_info/__init__.py @@ -1,4 +1,5 @@ +from .group_project_files_by_lang_python import GroupProjectFilesByLangPythonHandler from .list_project_files_by_lang_python import ListProjectFilesByLangPythonHandler from .py_package_layout_info_provider import PyPackageLayoutInfoProvider -__all__ = ["ListProjectFilesByLangPythonHandler", "PyPackageLayoutInfoProvider"] +__all__ = ["GroupProjectFilesByLangPythonHandler","ListProjectFilesByLangPythonHandler", "PyPackageLayoutInfoProvider"] diff --git a/extensions/fine_python_package_info/fine_python_package_info/group_project_files_by_lang_python.py b/extensions/fine_python_package_info/fine_python_package_info/group_project_files_by_lang_python.py new file mode 100644 index 0000000..dd58939 --- /dev/null +++ b/extensions/fine_python_package_info/fine_python_package_info/group_project_files_by_lang_python.py @@ -0,0 +1,83 @@ +from finecode_extension_api.interfaces import ( + iprojectinfoprovider, + ipypackagelayoutinfoprovider, + ilogger, +) +import dataclasses +import pathlib + +from finecode_extension_api import code_action +from finecode_extension_api.actions import ( + group_project_files_by_lang as group_project_files_by_lang_action, +) + + +@dataclasses.dataclass +class 
GroupProjectFilesByLangPythonHandlerConfig(code_action.ActionHandlerConfig): + # list of relative pathes relative to project directory with additional python + # sources if they are not in one of default pathes + additional_dirs: list[pathlib.Path] | None = None + + +class GroupProjectFilesByLangPythonHandler( + code_action.ActionHandler[ + group_project_files_by_lang_action.GroupProjectFilesByLangAction, + GroupProjectFilesByLangPythonHandlerConfig, + ] +): + def __init__( + self, + config: GroupProjectFilesByLangPythonHandlerConfig, + project_info_provider: iprojectinfoprovider.IProjectInfoProvider, + py_package_layout_info_provider: ipypackagelayoutinfoprovider.IPyPackageLayoutInfoProvider, + logger: ilogger.ILogger, + ) -> None: + self.config = config + self.project_info_provider = project_info_provider + self.py_package_layout_info_provider = py_package_layout_info_provider + self.logger = logger + + self.current_project_dir_path = ( + self.project_info_provider.get_current_project_dir_path() + ) + self.tests_dir_path = self.current_project_dir_path / "tests" + self.scripts_dir_path = self.current_project_dir_path / "scripts" + self.setup_py_path = self.current_project_dir_path / "setup.py" + + async def run( + self, + payload: group_project_files_by_lang_action.GroupProjectFilesByLangRunPayload, + run_context: group_project_files_by_lang_action.GroupProjectFilesByLangRunContext, + ) -> group_project_files_by_lang_action.GroupProjectFilesByLangRunResult: + # TODO + py_files: list[pathlib.Path] = [] + project_package_src_root_dir_path = ( + await self.py_package_layout_info_provider.get_package_src_root_dir_path( + package_dir_path=self.current_project_dir_path + ) + ) + py_files += list(project_package_src_root_dir_path.rglob("*.py")) + + if self.scripts_dir_path.exists(): + py_files += list(self.scripts_dir_path.rglob("*.py")) + + if self.tests_dir_path.exists(): + py_files += list(self.tests_dir_path.rglob("*.py")) + + if self.setup_py_path.exists(): + 
py_files.append(self.setup_py_path) + + if self.config.additional_dirs is not None: + for dir_path in self.config.additional_dirs: + dir_absolute_path = self.current_project_dir_path / dir_path + if not dir_absolute_path.exists(): + self.logger.warning( + f"Skip {dir_path} because {dir_absolute_path} doesn't exist" + ) + continue + + py_files += list(dir_absolute_path.rglob("*.py")) + + return group_project_files_by_lang_action.GroupProjectFilesByLangRunResult( + files_by_lang={"python": py_files} + ) diff --git a/extensions/fine_python_package_info/fine_python_package_info/py_package_layout_info_provider.py b/extensions/fine_python_package_info/fine_python_package_info/py_package_layout_info_provider.py index 58d52c0..aa95583 100644 --- a/extensions/fine_python_package_info/fine_python_package_info/py_package_layout_info_provider.py +++ b/extensions/fine_python_package_info/fine_python_package_info/py_package_layout_info_provider.py @@ -4,7 +4,7 @@ import tomlkit.exceptions from finecode_extension_api.interfaces import ( - ifilemanager, + ifileeditor, ipypackagelayoutinfoprovider, icache, ) @@ -20,11 +20,14 @@ class PyPackageLayoutInfoProvider( ipypackagelayoutinfoprovider.IPyPackageLayoutInfoProvider, service.Service ): PACKAGE_NAME_CACHE_KEY = "PyPackageLayoutInfoProviderPackageName" + FILE_OPERATION_AUTHOR = ifileeditor.FileOperationAuthor( + id="PyPackageLayoutInfoProvider" + ) def __init__( - self, file_manager: ifilemanager.IFileManager, cache: icache.ICache + self, file_editor: ifileeditor.IFileEditor, cache: icache.ICache ) -> None: - self.file_manager = file_manager + self.file_editor = file_editor self.cache = cache async def _get_package_name(self, package_dir_path: pathlib.Path) -> str: @@ -43,12 +46,13 @@ async def _get_package_name(self, package_dir_path: pathlib.Path) -> str: except icache.CacheMissException: ... 
- package_def_file_content = await self.file_manager.get_content( - file_path=package_def_file - ) - package_def_file_version = await self.file_manager.get_file_version( - file_path=package_def_file - ) + async with self.file_editor.session( + author=self.FILE_OPERATION_AUTHOR + ) as session: + async with session.read_file(file_path=package_def_file) as file_info: + package_def_file_content: str = file_info.content + package_def_file_version: str = file_info.version + try: package_def_dict = tomlkit.loads(package_def_file_content) except tomlkit.exceptions.ParseError as exception: @@ -82,7 +86,7 @@ async def get_package_layout( except ConfigParseError as exception: raise ipypackagelayoutinfoprovider.FailedToGetPackageLayout( exception.message - ) + ) from exception if (package_dir_path / package_name).exists(): return ipypackagelayoutinfoprovider.PyPackageLayout.FLAT diff --git a/extensions/fine_python_package_info/pyproject.toml b/extensions/fine_python_package_info/pyproject.toml index 92aab9f..baaa62d 100644 --- a/extensions/fine_python_package_info/pyproject.toml +++ b/extensions/fine_python_package_info/pyproject.toml @@ -18,3 +18,7 @@ finecode_dev_common_preset = { path = "../../finecode_dev_common_preset", editab finecode = { path = "../../", editable = true } finecode_extension_runner = { path = "../../finecode_extension_runner", editable = true } finecode_extension_api = { path = "../../finecode_extension_api", editable = true } +finecode_jsonrpc = { path = "../../finecode_jsonrpc", editable = true } +finecode_builtin_handlers = { path = "../../finecode_builtin_handlers", editable = true } +fine_python_lint = { path = "../../presets/fine_python_lint", editable = true } +fine_python_format = { path = "../../presets/fine_python_format", editable = true } diff --git a/extensions/fine_python_pip/pyproject.toml b/extensions/fine_python_pip/pyproject.toml index db39e9a..45f204b 100644 --- a/extensions/fine_python_pip/pyproject.toml +++ 
b/extensions/fine_python_pip/pyproject.toml @@ -18,3 +18,7 @@ finecode_dev_common_preset = { path = "../../finecode_dev_common_preset", editab finecode = { path = "../../", editable = true } finecode_extension_runner = { path = "../../finecode_extension_runner", editable = true } finecode_extension_api = { path = "../../finecode_extension_api", editable = true } +finecode_jsonrpc = { path = "../../finecode_jsonrpc", editable = true } +finecode_builtin_handlers = { path = "../../finecode_builtin_handlers", editable = true } +fine_python_lint = { path = "../../presets/fine_python_lint", editable = true } +fine_python_format = { path = "../../presets/fine_python_format", editable = true } diff --git a/extensions/fine_python_pyrefly/fine_python_pyrefly/__init__.py b/extensions/fine_python_pyrefly/fine_python_pyrefly/__init__.py index 2b0ee58..26d2e7b 100644 --- a/extensions/fine_python_pyrefly/fine_python_pyrefly/__init__.py +++ b/extensions/fine_python_pyrefly/fine_python_pyrefly/__init__.py @@ -1,6 +1,6 @@ -from .lint_handler import PyreflyLintHandler, PyreflyLintHandlerConfig +from .lint_files_handler import PyreflyLintFilesHandler, PyreflyLintFilesHandlerConfig __all__ = [ - "PyreflyLintHandler", - "PyreflyLintHandlerConfig", + "PyreflyLintFilesHandler", + "PyreflyLintFilesHandlerConfig", ] diff --git a/extensions/fine_python_pyrefly/fine_python_pyrefly/lint_handler.py b/extensions/fine_python_pyrefly/fine_python_pyrefly/lint_files_handler.py similarity index 75% rename from extensions/fine_python_pyrefly/fine_python_pyrefly/lint_handler.py rename to extensions/fine_python_pyrefly/fine_python_pyrefly/lint_files_handler.py index e996f7b..7f11c6d 100644 --- a/extensions/fine_python_pyrefly/fine_python_pyrefly/lint_handler.py +++ b/extensions/fine_python_pyrefly/fine_python_pyrefly/lint_files_handler.py @@ -6,24 +6,26 @@ from pathlib import Path from finecode_extension_api import code_action -from finecode_extension_api.actions import lint as lint_action +from 
finecode_extension_api.actions import lint_files as lint_files_action from finecode_extension_api.interfaces import ( icache, icommandrunner, ilogger, - ifilemanager, + ifileeditor, iprojectfileclassifier, iextensionrunnerinfoprovider, ) @dataclasses.dataclass -class PyreflyLintHandlerConfig(code_action.ActionHandlerConfig): +class PyreflyLintFilesHandlerConfig(code_action.ActionHandlerConfig): python_version: str | None = None -class PyreflyLintHandler( - code_action.ActionHandler[lint_action.LintAction, PyreflyLintHandlerConfig] +class PyreflyLintFilesHandler( + code_action.ActionHandler[ + lint_files_action.LintFilesAction, PyreflyLintFilesHandlerConfig + ] ): """ NOTE: pyrefly currently can check only saved files, not file content provided by @@ -32,13 +34,16 @@ class PyreflyLintHandler( """ CACHE_KEY = "PyreflyLinter" + FILE_OPERATION_AUTHOR = ifileeditor.FileOperationAuthor( + id="PyreflyLinter" + ) def __init__( self, - config: PyreflyLintHandlerConfig, + config: PyreflyLintFilesHandlerConfig, cache: icache.ICache, logger: ilogger.ILogger, - file_manager: ifilemanager.IFileManager, + file_editor: ifileeditor.IFileEditor, command_runner: icommandrunner.ICommandRunner, project_file_classifier: iprojectfileclassifier.IProjectFileClassifier, extension_runner_info_provider: iextensionrunnerinfoprovider.IExtensionRunnerInfoProvider, @@ -46,36 +51,42 @@ def __init__( self.config = config self.cache = cache self.logger = logger - self.file_manager = file_manager + self.file_editor = file_editor self.command_runner = command_runner self.project_file_classifier = project_file_classifier self.extension_runner_info_provider = extension_runner_info_provider self.pyrefly_bin_path = Path(sys.executable).parent / "pyrefly" - async def run_on_single_file(self, file_path: Path) -> lint_action.LintRunResult: + async def run_on_single_file( + self, file_path: Path + ) -> lint_files_action.LintFilesRunResult: messages = {} try: cached_lint_messages = await 
self.cache.get_file_cache( file_path, self.CACHE_KEY ) messages[str(file_path)] = cached_lint_messages - return lint_action.LintRunResult(messages=messages) + return lint_files_action.LintFilesRunResult(messages=messages) except icache.CacheMissException: pass + + async with self.file_editor.session( + author=self.FILE_OPERATION_AUTHOR + ) as session: + file_version = await session.read_file_version(file_path) - file_version = await self.file_manager.get_file_version(file_path) lint_messages = await self.run_pyrefly_lint_on_single_file(file_path) messages[str(file_path)] = lint_messages await self.cache.save_file_cache( file_path, file_version, self.CACHE_KEY, lint_messages ) - return lint_action.LintRunResult(messages=messages) + return lint_files_action.LintFilesRunResult(messages=messages) async def run( self, - payload: lint_action.LintRunPayload, + payload: lint_files_action.LintFilesRunPayload, run_context: code_action.RunActionWithPartialResultsContext, ) -> None: file_paths = [file_path async for file_path in payload] @@ -89,9 +100,9 @@ async def run( async def run_pyrefly_lint_on_single_file( self, file_path: Path, - ) -> list[lint_action.LintMessage]: + ) -> list[lint_files_action.LintMessage]: """Run pyrefly type checking on a single file""" - lint_messages: list[lint_action.LintMessage] = [] + lint_messages: list[lint_files_action.LintMessage] = [] try: # project file classifier caches result, we can just get it each time again @@ -122,7 +133,7 @@ async def run_pyrefly_lint_on_single_file( ) # --skip-interpreter-query isn't used because it is not compatible - # with --python-interpreter parameter + # with --python-interpreter-path parameter # --disable-search-path-heuristics=true isn't used because pyrefly doesn't # recognize some imports without it. For example, it cannot resolve relative # imports in root __init__.py . 
Needs to be investigated @@ -132,7 +143,7 @@ async def run_pyrefly_lint_on_single_file( "--output-format=json", # path to python interpreter because pyrefly resolves .pth files only if # it is provided - f"--python-interpreter='{str(interpreter_path)}'", + f"--python-interpreter-path='{str(interpreter_path)}'", ] if self.config.python_version is not None: @@ -153,15 +164,15 @@ async def run_pyrefly_lint_on_single_file( for error in pyrefly_results["errors"]: lint_message = map_pyrefly_error_to_lint_message(error) lint_messages.append(lint_message) - except json.JSONDecodeError: + except json.JSONDecodeError as exception: raise code_action.ActionFailedException( f"Output of pyrefly is not json: {output}" - ) + ) from exception return lint_messages -def map_pyrefly_error_to_lint_message(error: dict) -> lint_action.LintMessage: +def map_pyrefly_error_to_lint_message(error: dict) -> lint_files_action.LintMessage: """Map a pyrefly error to a lint message""" # Extract line/column info (pyrefly uses 1-based indexing) start_line = error["line"] @@ -172,12 +183,12 @@ def map_pyrefly_error_to_lint_message(error: dict) -> lint_action.LintMessage: # Determine severity based on error type error_code = str(error.get("code", "")) code_description = error.get("name", "") - severity = lint_action.LintMessageSeverity.ERROR + severity = lint_files_action.LintMessageSeverity.ERROR - return lint_action.LintMessage( - range=lint_action.Range( - start=lint_action.Position(line=start_line, character=start_column), - end=lint_action.Position(line=end_line, character=end_column), + return lint_files_action.LintMessage( + range=lint_files_action.Range( + start=lint_files_action.Position(line=start_line, character=start_column), + end=lint_files_action.Position(line=end_line, character=end_column), ), message=error.get("description", ""), code=error_code, diff --git a/extensions/fine_python_pyrefly/pyproject.toml b/extensions/fine_python_pyrefly/pyproject.toml index 311a799..e019907 100644 
--- a/extensions/fine_python_pyrefly/pyproject.toml +++ b/extensions/fine_python_pyrefly/pyproject.toml @@ -18,3 +18,7 @@ finecode_dev_common_preset = { path = "../../finecode_dev_common_preset", editab finecode = { path = "../../", editable = true } finecode_extension_runner = { path = "../../finecode_extension_runner", editable = true } finecode_extension_api = { path = "../../finecode_extension_api", editable = true } +finecode_jsonrpc = { path = "../../finecode_jsonrpc", editable = true } +finecode_builtin_handlers = { path = "../../finecode_builtin_handlers", editable = true } +fine_python_lint = { path = "../../presets/fine_python_lint", editable = true } +fine_python_format = { path = "../../presets/fine_python_format", editable = true } diff --git a/extensions/fine_python_ruff/fine_python_ruff/__init__.py b/extensions/fine_python_ruff/fine_python_ruff/__init__.py index 3e77465..c07966f 100644 --- a/extensions/fine_python_ruff/fine_python_ruff/__init__.py +++ b/extensions/fine_python_ruff/fine_python_ruff/__init__.py @@ -1,9 +1,9 @@ -from .format_handler import RuffFormatHandler, RuffFormatHandlerConfig -from .lint_handler import RuffLintHandler, RuffLintHandlerConfig +from .format_files_handler import RuffFormatFilesHandler, RuffFormatFilesHandlerConfig +from .lint_files_handler import RuffLintFilesHandler, RuffLintFilesHandlerConfig __all__ = [ - "RuffFormatHandler", - "RuffFormatHandlerConfig", - "RuffLintHandler", - "RuffLintHandlerConfig", + "RuffFormatFilesHandler", + "RuffFormatFilesHandlerConfig", + "RuffLintFilesHandler", + "RuffLintFilesHandlerConfig", ] diff --git a/extensions/fine_python_ruff/fine_python_ruff/format_handler.py b/extensions/fine_python_ruff/fine_python_ruff/format_files_handler.py similarity index 71% rename from extensions/fine_python_ruff/fine_python_ruff/format_handler.py rename to extensions/fine_python_ruff/fine_python_ruff/format_files_handler.py index ca3f0e8..82ecd48 100644 --- 
a/extensions/fine_python_ruff/fine_python_ruff/format_handler.py +++ b/extensions/fine_python_ruff/fine_python_ruff/format_files_handler.py @@ -12,7 +12,7 @@ from typing import override from finecode_extension_api import code_action -from finecode_extension_api.actions import format as format_action +from finecode_extension_api.actions import format_files as format_files_action from finecode_extension_api.interfaces import ( icache, icommandrunner, @@ -22,7 +22,7 @@ @dataclasses.dataclass -class RuffFormatHandlerConfig(code_action.ActionHandlerConfig): +class RuffFormatFilesHandlerConfig(code_action.ActionHandlerConfig): line_length: int = 88 indent_width: int = 4 quote_style: str = "double" # "double" or "single" @@ -30,14 +30,14 @@ class RuffFormatHandlerConfig(code_action.ActionHandlerConfig): preview: bool = False -class RuffFormatHandler( - code_action.ActionHandler[format_action.FormatAction, RuffFormatHandlerConfig] +class RuffFormatFilesHandler( + code_action.ActionHandler[ + format_files_action.FormatFilesAction, RuffFormatFilesHandlerConfig + ] ): - CACHE_KEY = "RuffFormatter" - def __init__( self, - config: RuffFormatHandlerConfig, + config: RuffFormatFilesHandlerConfig, extension_runner_info_provider: iextensionrunnerinfoprovider.IExtensionRunnerInfoProvider, logger: ilogger.ILogger, cache: icache.ICache, @@ -54,40 +54,29 @@ def __init__( @override async def run( self, - payload: format_action.FormatRunPayload, - run_context: format_action.FormatRunContext, - ) -> format_action.FormatRunResult: - result_by_file_path: dict[Path, format_action.FormatRunFileResult] = {} + payload: format_files_action.FormatFilesRunPayload, + run_context: format_files_action.FormatFilesRunContext, + ) -> format_files_action.FormatFilesRunResult: + result_by_file_path: dict[Path, format_files_action.FormatRunFileResult] = {} for file_path in payload.file_paths: file_content, file_version = run_context.file_info_by_path[file_path] - try: - new_file_content = await 
self.cache.get_file_cache( - file_path, self.CACHE_KEY - ) - result_by_file_path[file_path] = format_action.FormatRunFileResult( - changed=False, code=new_file_content - ) - continue - except icache.CacheMissException: - pass new_file_content, file_changed = await self.format_one( file_path, file_content ) # save for next handlers - run_context.file_info_by_path[file_path] = format_action.FileInfo( + run_context.file_info_by_path[file_path] = format_files_action.FileInfo( new_file_content, file_version ) - await self.cache.save_file_cache( - file_path, file_version, self.CACHE_KEY, new_file_content - ) - result_by_file_path[file_path] = format_action.FormatRunFileResult( + result_by_file_path[file_path] = format_files_action.FormatRunFileResult( changed=file_changed, code=new_file_content ) - return format_action.FormatRunResult(result_by_file_path=result_by_file_path) + return format_files_action.FormatFilesRunResult( + result_by_file_path=result_by_file_path + ) async def format_one(self, file_path: Path, file_content: str) -> tuple[str, bool]: """Format a single file using ruff format""" diff --git a/extensions/fine_python_ruff/fine_python_ruff/lint_handler.py b/extensions/fine_python_ruff/fine_python_ruff/lint_files_handler.py similarity index 71% rename from extensions/fine_python_ruff/fine_python_ruff/lint_handler.py rename to extensions/fine_python_ruff/fine_python_ruff/lint_files_handler.py index 41c1706..3e8417f 100644 --- a/extensions/fine_python_ruff/fine_python_ruff/lint_handler.py +++ b/extensions/fine_python_ruff/fine_python_ruff/lint_files_handler.py @@ -6,17 +6,17 @@ from pathlib import Path from finecode_extension_api import code_action -from finecode_extension_api.actions import lint as lint_action +from finecode_extension_api.actions import lint_files as lint_files_action from finecode_extension_api.interfaces import ( icache, icommandrunner, ilogger, - ifilemanager, + ifileeditor, ) @dataclasses.dataclass -class 
RuffLintHandlerConfig(code_action.ActionHandlerConfig): +class RuffLintFilesHandlerConfig(code_action.ActionHandlerConfig): line_length: int = 88 target_version: str = "py38" select: list[str] | None = None # Rules to enable @@ -25,51 +25,61 @@ class RuffLintHandlerConfig(code_action.ActionHandlerConfig): preview: bool = False -class RuffLintHandler( - code_action.ActionHandler[lint_action.LintAction, RuffLintHandlerConfig] +class RuffLintFilesHandler( + code_action.ActionHandler[ + lint_files_action.LintFilesAction, RuffLintFilesHandlerConfig + ] ): CACHE_KEY = "RuffLinter" + FILE_OPERATION_AUTHOR = ifileeditor.FileOperationAuthor(id="RuffLintFilesHandler") def __init__( self, - config: RuffLintHandlerConfig, + config: RuffLintFilesHandlerConfig, cache: icache.ICache, logger: ilogger.ILogger, - file_manager: ifilemanager.IFileManager, + file_editor: ifileeditor.IFileEditor, command_runner: icommandrunner.ICommandRunner, ) -> None: self.config = config self.cache = cache self.logger = logger - self.file_manager = file_manager + self.file_editor = file_editor self.command_runner = command_runner self.ruff_bin_path = Path(sys.executable).parent / "ruff" - async def run_on_single_file(self, file_path: Path) -> lint_action.LintRunResult: + async def run_on_single_file( + self, file_path: Path + ) -> lint_files_action.LintFilesRunResult: messages = {} try: cached_lint_messages = await self.cache.get_file_cache( file_path, self.CACHE_KEY ) messages[str(file_path)] = cached_lint_messages - return lint_action.LintRunResult(messages=messages) + return lint_files_action.LintFilesRunResult(messages=messages) except icache.CacheMissException: pass - file_version = await self.file_manager.get_file_version(file_path) - file_content = await self.file_manager.get_content(file_path) + async with self.file_editor.session( + author=self.FILE_OPERATION_AUTHOR + ) as session: + async with session.read_file(file_path=file_path) as file_info: + file_content: str = file_info.content + 
file_version: str = file_info.version + lint_messages = await self.run_ruff_lint_on_single_file(file_path, file_content) messages[str(file_path)] = lint_messages await self.cache.save_file_cache( file_path, file_version, self.CACHE_KEY, lint_messages ) - return lint_action.LintRunResult(messages=messages) + return lint_files_action.LintFilesRunResult(messages=messages) async def run( self, - payload: lint_action.LintRunPayload, + payload: lint_files_action.LintFilesRunPayload, run_context: code_action.RunActionWithPartialResultsContext, ) -> None: file_paths = [file_path async for file_path in payload] @@ -84,9 +94,9 @@ async def run_ruff_lint_on_single_file( self, file_path: Path, file_content: str, - ) -> list[lint_action.LintMessage]: + ) -> list[lint_files_action.LintMessage]: """Run ruff linting on a single file""" - lint_messages: list[lint_action.LintMessage] = [] + lint_messages: list[lint_files_action.LintMessage] = [] # Build ruff check command cmd = [ @@ -135,7 +145,9 @@ async def run_ruff_lint_on_single_file( return lint_messages -def map_ruff_violation_to_lint_message(violation: dict) -> lint_action.LintMessage: +def map_ruff_violation_to_lint_message( + violation: dict, +) -> lint_files_action.LintMessage: """Map a ruff violation to a lint message""" location = violation.get("location", {}) end_location = violation.get("end_location", {}) @@ -150,16 +162,16 @@ def map_ruff_violation_to_lint_message(violation: dict) -> lint_action.LintMessa code = violation.get("code", "") code_description = violation.get("url", "") if code.startswith(("E", "F")): # Error codes - severity = lint_action.LintMessageSeverity.ERROR + severity = lint_files_action.LintMessageSeverity.ERROR elif code.startswith("W"): # Warning codes - severity = lint_action.LintMessageSeverity.WARNING + severity = lint_files_action.LintMessageSeverity.WARNING else: - severity = lint_action.LintMessageSeverity.INFO + severity = lint_files_action.LintMessageSeverity.INFO - return 
lint_action.LintMessage( - range=lint_action.Range( - start=lint_action.Position(line=start_line, character=start_column), - end=lint_action.Position(line=end_line, character=end_column), + return lint_files_action.LintMessage( + range=lint_files_action.Range( + start=lint_files_action.Position(line=start_line, character=start_column), + end=lint_files_action.Position(line=end_line, character=end_column), ), message=violation.get("message", ""), code=code, diff --git a/extensions/fine_python_ruff/pyproject.toml b/extensions/fine_python_ruff/pyproject.toml index 8ab8116..cb237de 100644 --- a/extensions/fine_python_ruff/pyproject.toml +++ b/extensions/fine_python_ruff/pyproject.toml @@ -18,3 +18,5 @@ finecode_dev_common_preset = { path = "../../finecode_dev_common_preset", editab finecode = { path = "../../", editable = true } finecode_extension_runner = { path = "../../finecode_extension_runner", editable = true } finecode_extension_api = { path = "../../finecode_extension_api", editable = true } +finecode_jsonrpc = { path = "../../finecode_jsonrpc", editable = true } +finecode_builtin_handlers = { path = "../../finecode_builtin_handlers", editable = true } diff --git a/extensions/fine_python_virtualenv/pyproject.toml b/extensions/fine_python_virtualenv/pyproject.toml index 96e4781..cfbbeee 100644 --- a/extensions/fine_python_virtualenv/pyproject.toml +++ b/extensions/fine_python_virtualenv/pyproject.toml @@ -21,3 +21,7 @@ finecode_dev_common_preset = { path = "../../finecode_dev_common_preset", editab finecode = { path = "../../", editable = true } finecode_extension_runner = { path = "../../finecode_extension_runner", editable = true } finecode_extension_api = { path = "../../finecode_extension_api", editable = true } +finecode_jsonrpc = { path = "../../finecode_jsonrpc", editable = true } +finecode_builtin_handlers = { path = "../../finecode_builtin_handlers", editable = true } +fine_python_lint = { path = "../../presets/fine_python_lint", editable = true } 
+fine_python_format = { path = "../../presets/fine_python_format", editable = true } diff --git a/finecode_builtin_handlers/pyproject.toml b/finecode_builtin_handlers/pyproject.toml index 2edaed5..10a21ba 100644 --- a/finecode_builtin_handlers/pyproject.toml +++ b/finecode_builtin_handlers/pyproject.toml @@ -15,6 +15,9 @@ finecode_dev_common_preset = { path = "../finecode_dev_common_preset", editable finecode = { path = "../", editable = true } finecode_extension_runner = { path = "../finecode_extension_runner", editable = true } finecode_extension_api = { path = "../finecode_extension_api", editable = true } +finecode_jsonrpc = { path = "../finecode_jsonrpc", editable = true } +fine_python_lint = { path = "../presets/fine_python_lint", editable = true } +fine_python_format = { path = "../presets/fine_python_format", editable = true } [tool.finecode] presets = [{ source = "finecode_dev_common_preset" }] diff --git a/finecode_builtin_handlers/src/finecode_builtin_handlers/__init__.py b/finecode_builtin_handlers/src/finecode_builtin_handlers/__init__.py index d98bd89..d5ad0ee 100644 --- a/finecode_builtin_handlers/src/finecode_builtin_handlers/__init__.py +++ b/finecode_builtin_handlers/src/finecode_builtin_handlers/__init__.py @@ -1,7 +1,10 @@ """FineCode Built-in handlers.""" +from .clean_finecode_logs import CleanFinecodeLogsHandler from .dump_config import DumpConfigHandler from .dump_config_save import DumpConfigSaveHandler +from .format import FormatHandler +from .lint import LintHandler from .prepare_envs_install_deps import PrepareEnvsInstallDepsHandler from .prepare_envs_read_configs import PrepareEnvsReadConfigsHandler from .prepare_runners_install_runner_and_presets import ( @@ -10,7 +13,10 @@ from .prepare_runners_read_configs import PrepareRunnersReadConfigsHandler __all__ = [ + "CleanFinecodeLogsHandler", "DumpConfigHandler", + "FormatHandler", + "LintHandler", 
"PrepareEnvsInstallDepsHandler", "PrepareEnvsReadConfigsHandler", "PrepareRunnersInstallRunnerAndPresetsHandler", diff --git a/finecode_builtin_handlers/src/finecode_builtin_handlers/clean_finecode_logs.py b/finecode_builtin_handlers/src/finecode_builtin_handlers/clean_finecode_logs.py new file mode 100644 index 0000000..2f1c198 --- /dev/null +++ b/finecode_builtin_handlers/src/finecode_builtin_handlers/clean_finecode_logs.py @@ -0,0 +1,46 @@ +import dataclasses + +from finecode_extension_api import code_action +from finecode_extension_api.actions import ( + clean_finecode_logs as clean_finecode_logs_action, +) +from finecode_extension_api.interfaces import ilogger, iextensionrunnerinfoprovider + + +@dataclasses.dataclass +class CleanFinecodeLogsHandlerConfig(code_action.ActionHandlerConfig): ... + + +class CleanFinecodeLogsHandler( + code_action.ActionHandler[ + clean_finecode_logs_action.CleanFinecodeLogsAction, + CleanFinecodeLogsHandlerConfig, + ] +): + def __init__( + self, + logger: ilogger.ILogger, + extension_runner_info_provider: iextensionrunnerinfoprovider.IExtensionRunnerInfoProvider, + ) -> None: + self.logger = logger + self.extension_runner_info_provider = extension_runner_info_provider + + async def run( + self, + payload: clean_finecode_logs_action.CleanFinecodeLogsRunPayload, + run_context: clean_finecode_logs_action.CleanFinecodeLogsRunContext, + ) -> clean_finecode_logs_action.CleanFinecodeLogsRunResult: + venv_dir_path = self.extension_runner_info_provider.get_current_venv_dir_path() + logs_dir_path = venv_dir_path / "logs" + errors: list[str] = [] + + # use file manager instead? 
+ for log_file_path in logs_dir_path.glob("*.log"): + try: + log_file_path.unlink() + except Exception as exception: + errors.append(str(exception)) + + self.logger.info(f"Deleted {log_file_path}") + + return clean_finecode_logs_action.CleanFinecodeLogsRunResult(errors=errors) diff --git a/finecode_builtin_handlers/src/finecode_builtin_handlers/dump_config_save.py b/finecode_builtin_handlers/src/finecode_builtin_handlers/dump_config_save.py index 2252f39..2d531c2 100644 --- a/finecode_builtin_handlers/src/finecode_builtin_handlers/dump_config_save.py +++ b/finecode_builtin_handlers/src/finecode_builtin_handlers/dump_config_save.py @@ -4,7 +4,7 @@ from finecode_extension_api import code_action from finecode_extension_api.actions import dump_config as dump_config_action -from finecode_extension_api.interfaces import ifilemanager +from finecode_extension_api.interfaces import ifilemanager, ifileeditor @dataclasses.dataclass @@ -16,11 +16,17 @@ class DumpConfigSaveHandler( dump_config_action.DumpConfigAction, DumpConfigSaveHandlerConfig ] ): + FILE_OPERATION_AUTHOR = ifileeditor.FileOperationAuthor( + id="DumpConfigSaveHandler" + ) + def __init__( self, file_manager: ifilemanager.IFileManager, + file_editor: ifileeditor.IFileEditor ) -> None: self.file_manager = file_manager + self.file_editor = file_editor async def run( self, @@ -31,9 +37,12 @@ async def run( target_file_dir_path = payload.target_file_path.parent await self.file_manager.create_dir(dir_path=target_file_dir_path) - await self.file_manager.save_file( - file_path=payload.target_file_path, file_content=raw_config_str - ) + async with self.file_editor.session( + author=self.FILE_OPERATION_AUTHOR + ) as session: + await session.save_file( + file_path=payload.target_file_path, file_content=raw_config_str + ) return dump_config_action.DumpConfigRunResult( config_dump=run_context.raw_config_dump diff --git a/finecode_builtin_handlers/src/finecode_builtin_handlers/format.py
b/finecode_builtin_handlers/src/finecode_builtin_handlers/format.py new file mode 100644 index 0000000..ae61217 --- /dev/null +++ b/finecode_builtin_handlers/src/finecode_builtin_handlers/format.py @@ -0,0 +1,155 @@ +import asyncio +import dataclasses +import pathlib + +from finecode_extension_api import code_action +from finecode_extension_api.actions import ( + format as format_action, + format_files as format_files_action, + list_project_files_by_lang as list_project_files_by_lang_action, + group_project_files_by_lang as group_project_files_by_lang_action, +) +from finecode_extension_api.interfaces import ( + iactionrunner, + ifileeditor, + ilogger, +) + + +@dataclasses.dataclass +class FormatHandlerConfig(code_action.ActionHandlerConfig): ... + + +class FormatHandler( + code_action.ActionHandler[format_action.FormatAction, FormatHandlerConfig] +): + def __init__( + self, + action_runner: iactionrunner.IActionRunner, + logger: ilogger.ILogger, + file_editor: ifileeditor.IFileEditor, + ) -> None: + self.action_runner = action_runner + self.file_editor = file_editor + self.logger = logger + + async def run( + self, + payload: format_action.FormatRunPayload, + run_context: format_action.FormatRunContext, + ) -> format_action.FormatRunResult: + files_by_lang: dict[str, list[pathlib.Path]] = {} + + # first get languages for which formatters are available, they change rarely + # only on project config change + all_actions = self.action_runner.get_actions_names() + lint_files_prefix = "format_files_" + lint_files_actions = [ + action_name + for action_name in all_actions + if action_name.startswith(lint_files_prefix) + ] + # TODO: ordered set? 
+ # TODO: cache and update on project config change + langs_supported_by_lint = list( + set( + [ + action_name[len(lint_files_prefix) :] + for action_name in lint_files_actions + ] + ) + ) + run_meta = run_context.meta + + if payload.target == format_action.FormatTarget.PROJECT: + if ( + run_meta.dev_env == code_action.DevEnv.IDE + and run_meta.trigger == code_action.RunActionTrigger.SYSTEM + ): + # performance optimization: if IDE automatically(=`trigger == SYSTEM`) + # tries to lint the whole project, lint only files owned by IDE(usually + # these are opened files). + # In future it could be improved by linting opened files + dependencies + # or e.g. files changed according to git + dependencies. + files_to_lint: list[pathlib.Path] = self.file_editor.get_opened_files() + group_project_files_action = self.action_runner.get_action_by_name( + "group_project_files_by_lang" + ) + group_project_files_by_lang_payload = group_project_files_by_lang_action.GroupProjectFilesByLangRunPayload( + file_paths=files_to_lint, langs=langs_supported_by_lint + ) + files_by_lang_result = await self.action_runner.run_action( + action=group_project_files_action, + payload=group_project_files_by_lang_payload, + meta=run_meta + ) + files_by_lang = files_by_lang_result.files_by_lang + else: + # not automatic check of IDE, lint the whole project. 
+ # Instead of getting all files in the project and then grouping them by + # language, use `list_project_files_by_lang_action` action which returns + # only files with supported languages + list_project_file_by_lang_action_instance = ( + self.action_runner.get_action_by_name("list_project_files_by_lang") + ) + list_project_files_by_lang_payload = ( + list_project_files_by_lang_action.ListProjectFilesByLangRunPayload( + langs=langs_supported_by_lint + ) + ) + files_by_lang_result = await self.action_runner.run_action( + action=list_project_file_by_lang_action_instance, + payload=list_project_files_by_lang_payload, + meta=run_meta + ) + files_by_lang = files_by_lang_result.files_by_lang + + else: + # lint target are files, lint them + files_to_lint = payload.file_paths + group_project_files_by_lang_action_instance = ( + self.action_runner.get_action_by_name("group_project_files_by_lang") + ) + group_project_files_by_lang_payload = ( + group_project_files_by_lang_action.GroupProjectFilesByLangRunPayload( + file_paths=files_to_lint, langs=langs_supported_by_lint + ) + ) + files_by_lang_result = await self.action_runner.run_action( + action=group_project_files_by_lang_action_instance, + payload=group_project_files_by_lang_payload, + meta=run_meta + ) + files_by_lang = files_by_lang_result.files_by_lang + + # TODO: handle errors + lint_tasks = [] + try: + async with asyncio.TaskGroup() as tg: + for lang, lang_files in files_by_lang.items(): + # TODO: handle errors + # TODO: handle KeyError? + action = self.action_runner.get_action_by_name( + lint_files_prefix + lang + ) + lint_files_payload = format_files_action.FormatFilesRunPayload( + file_paths=lang_files, save=payload.save + ) + lint_task = tg.create_task( + self.action_runner.run_action( + action=action, payload=lint_files_payload, meta=run_meta + ) + ) + lint_tasks.append(lint_task) + except ExceptionGroup as eg: + error_str = ". 
".join([str(exception) for exception in eg.exceptions]) + raise code_action.ActionFailedException(error_str) + + lint_results = [task.result() for task in lint_tasks] + if len(lint_results) > 0: + result = format_action.FormatRunResult(result_by_file_path={}) + for subresult in lint_results: + result.update(subresult) + return result + else: + return format_action.FormatRunResult(result_by_file_path={}) diff --git a/finecode_builtin_handlers/src/finecode_builtin_handlers/lint.py b/finecode_builtin_handlers/src/finecode_builtin_handlers/lint.py new file mode 100644 index 0000000..00ae1ea --- /dev/null +++ b/finecode_builtin_handlers/src/finecode_builtin_handlers/lint.py @@ -0,0 +1,121 @@ +import asyncio +import dataclasses +import pathlib + +from finecode_extension_api import code_action +from finecode_extension_api.actions import ( + lint as lint_action, + lint_files as lint_files_action, + list_project_files_by_lang as list_project_files_by_lang_action, + group_project_files_by_lang as group_project_files_by_lang_action +) +from finecode_extension_api.interfaces import ( + iactionrunner, + ifileeditor, + ilogger, +) + + +@dataclasses.dataclass +class LintHandlerConfig(code_action.ActionHandlerConfig): ... 
+ + +class LintHandler( + code_action.ActionHandler[ + lint_action.LintAction, LintHandlerConfig + ] +): + def __init__( + self, action_runner: iactionrunner.IActionRunner, logger: ilogger.ILogger, file_editor: ifileeditor.IFileEditor + ) -> None: + self.action_runner = action_runner + self.file_editor = file_editor + self.logger = logger + + async def run( + self, + payload: lint_action.LintRunPayload, + run_context: lint_action.LintRunContext, + ) -> lint_action.LintRunResult: + # files_to_lint: list[pathlib.Path] = [] + files_by_lang: dict[str, list[pathlib.Path]] = {} + + # first get languages for which linters are available, they change rarely + # only on project config change + all_actions = self.action_runner.get_actions_names() + lint_files_prefix = 'lint_files_' + lint_files_actions = [action_name for action_name in all_actions if action_name.startswith(lint_files_prefix)] + # TODO: ordered set? + # TODO: cache and update on project config change + langs_supported_by_lint = list(set([action_name[len(lint_files_prefix):] for action_name in lint_files_actions])) + run_meta = run_context.meta + + if payload.target == lint_action.LintTarget.PROJECT: + if run_meta.dev_env == code_action.DevEnv.IDE and run_meta.trigger == code_action.RunActionTrigger.SYSTEM: + # performance optimization: if IDE automatically(=`trigger == SYSTEM`) + # tries to lint the whole project, lint only files owned by IDE(usually + # these are opened files). + # In future it could be improved by linting opened files + dependencies + # or e.g. files changed according to git + dependencies. 
+ files_to_lint: list[pathlib.Path] = self.file_editor.get_opened_files() + group_project_files_action = self.action_runner.get_action_by_name('group_project_files_by_lang') + group_project_files_by_lang_payload = group_project_files_by_lang_action.GroupProjectFilesByLangRunPayload(file_paths=files_to_lint, langs=langs_supported_by_lint) + files_by_lang_result = await self.action_runner.run_action( + action=group_project_files_action, + payload=group_project_files_by_lang_payload, + meta=run_meta + ) + files_by_lang = files_by_lang_result.files_by_lang + else: + # not automatic check of IDE, lint the whole project. + # Instead of getting all files in the project and then grouping them by + # language, use `list_project_files_by_lang_action` action which returns + # only files with supported languages + list_project_file_by_lang_action_instance = self.action_runner.get_action_by_name('list_project_files_by_lang') + list_project_files_by_lang_payload = list_project_files_by_lang_action.ListProjectFilesByLangRunPayload(langs=langs_supported_by_lint) + files_by_lang_result = await self.action_runner.run_action( + action=list_project_file_by_lang_action_instance, + payload=list_project_files_by_lang_payload, + meta=run_meta + ) + files_by_lang = files_by_lang_result.files_by_lang + + else: + # lint target are files, lint them + files_to_lint = payload.file_paths + group_project_files_by_lang_action_instance = self.action_runner.get_action_by_name('group_project_files_by_lang') + group_project_files_by_lang_payload = group_project_files_by_lang_action.GroupProjectFilesByLangRunPayload(file_paths=files_to_lint, langs=langs_supported_by_lint) + files_by_lang_result = await self.action_runner.run_action( + action=group_project_files_by_lang_action_instance, + payload=group_project_files_by_lang_payload, + meta=run_meta + ) + files_by_lang = files_by_lang_result.files_by_lang + + # TODO: handle errors + lint_tasks = [] + try: + async with asyncio.TaskGroup() as tg: + for 
lang, lang_files in files_by_lang.items(): + # TODO: handle errors + # TODO: handle KeyError? + action = self.action_runner.get_action_by_name(lint_files_prefix + lang) + lint_files_payload = lint_files_action.LintFilesRunPayload(file_paths=lang_files) + lint_task = tg.create_task(self.action_runner.run_action( + action=action, + payload=lint_files_payload, + meta=run_meta + )) + lint_tasks.append(lint_task) + except ExceptionGroup as eg: + error_str = ". ".join([str(exception) for exception in eg.exceptions]) + raise code_action.ActionFailedException(error_str) + + lint_results = [task.result() for task in lint_tasks] + if len(lint_results) > 0: + result = lint_action.LintRunResult(messages={}) + for subresult in lint_results: + result.update(subresult) + return result + else: + return lint_action.LintRunResult(messages={}) diff --git a/finecode_builtin_handlers/src/finecode_builtin_handlers/prepare_envs_install_deps.py b/finecode_builtin_handlers/src/finecode_builtin_handlers/prepare_envs_install_deps.py index e7c46f4..39e199b 100644 --- a/finecode_builtin_handlers/src/finecode_builtin_handlers/prepare_envs_install_deps.py +++ b/finecode_builtin_handlers/src/finecode_builtin_handlers/prepare_envs_install_deps.py @@ -1,9 +1,8 @@ import asyncio import dataclasses -import itertools from finecode_extension_api import code_action -from finecode_extension_api.actions import prepare_envs as prepare_envs_action +from finecode_extension_api.actions import prepare_envs as prepare_envs_action, install_deps_in_env as install_deps_in_env_action from finecode_extension_api.interfaces import ( iactionrunner, ilogger, @@ -33,7 +32,9 @@ async def run( ) -> prepare_envs_action.PrepareEnvsRunResult: envs = payload.envs - install_deps_tasks: list[asyncio.Task] = [] + install_deps_in_env_action_instance = self.action_runner.get_action_by_name(name="install_deps_in_env") + install_deps_tasks: list[asyncio.Task[install_deps_in_env_action.InstallDepsInEnvRunResult]] = [] + run_meta = 
run_context.meta try: async with asyncio.TaskGroup() as tg: for env in envs: @@ -56,16 +57,19 @@ async def run( process_raw_deps( env_raw_deps, env_deps_config, dependencies, deps_groups ) + + install_deps_payload = install_deps_in_env_action.InstallDepsInEnvRunPayload( + env_name=env.name, + venv_dir_path=env.venv_dir_path, + project_dir_path=env.project_def_path.parent, + dependencies=[install_deps_in_env_action.Dependency(name=dep['name'], version_or_source=dep['version_or_source'], editable=dep['editable']) for dep in dependencies] + ) task = tg.create_task( self.action_runner.run_action( - name="install_deps_in_env", - payload={ - "env_name": env.name, - "venv_dir_path": env.venv_dir_path, - "project_dir_path": env.project_def_path.parent, - "dependencies": dependencies, - }, + action=install_deps_in_env_action_instance, + payload=install_deps_payload, + meta=run_meta ) ) install_deps_tasks.append(task) @@ -74,11 +78,9 @@ async def run( raise code_action.ActionFailedException(error_str) install_deps_results = [task.result() for task in install_deps_tasks] - errors: list[str] = list( - itertools.chain.from_iterable( - [result["errors"] for result in install_deps_results] - ) - ) + errors: list[str] = [] + for result in install_deps_results: + errors += result.errors return prepare_envs_action.PrepareEnvsRunResult(errors=errors) diff --git a/finecode_builtin_handlers/src/finecode_builtin_handlers/prepare_envs_read_configs.py b/finecode_builtin_handlers/src/finecode_builtin_handlers/prepare_envs_read_configs.py index 9eed259..9f34082 100644 --- a/finecode_builtin_handlers/src/finecode_builtin_handlers/prepare_envs_read_configs.py +++ b/finecode_builtin_handlers/src/finecode_builtin_handlers/prepare_envs_read_configs.py @@ -6,7 +6,6 @@ from finecode_extension_api import code_action from finecode_extension_api.actions import prepare_envs as prepare_envs_action from finecode_extension_api.interfaces import ( - iactionrunner, ilogger, iprojectinfoprovider, ) @@ 
-24,11 +23,9 @@ class PrepareEnvsReadConfigsHandler( ): def __init__( self, - action_runner: iactionrunner.IActionRunner, project_info_provider: iprojectinfoprovider.IProjectInfoProvider, logger: ilogger.ILogger, ) -> None: - self.action_runner = action_runner self.project_info_provider = project_info_provider self.logger = logger diff --git a/finecode_builtin_handlers/src/finecode_builtin_handlers/prepare_runners_install_runner_and_presets.py b/finecode_builtin_handlers/src/finecode_builtin_handlers/prepare_runners_install_runner_and_presets.py index a897be0..c9a71f6 100644 --- a/finecode_builtin_handlers/src/finecode_builtin_handlers/prepare_runners_install_runner_and_presets.py +++ b/finecode_builtin_handlers/src/finecode_builtin_handlers/prepare_runners_install_runner_and_presets.py @@ -1,10 +1,9 @@ import asyncio import dataclasses -import itertools import typing from finecode_extension_api import code_action -from finecode_extension_api.actions import prepare_runners as prepare_runners_action +from finecode_extension_api.actions import prepare_runners as prepare_runners_action, install_deps_in_env as install_deps_in_env_action from finecode_extension_api.interfaces import ( iactionrunner, ilogger, @@ -56,19 +55,23 @@ async def run( ) dependencies_by_env[env.name] = dependencies - install_deps_tasks: list[asyncio.Task] = [] + install_deps_in_env_action_instance = self.action_runner.get_action_by_name(name="install_deps_in_env") + install_deps_tasks: list[asyncio.Task[install_deps_in_env_action.InstallDepsInEnvRunResult]] = [] + run_meta = run_context.meta try: async with asyncio.TaskGroup() as tg: for env in envs: + install_deps_payload = install_deps_in_env_action.InstallDepsInEnvRunPayload( + env_name=env.name, + venv_dir_path=env.venv_dir_path, + project_dir_path=env.project_def_path.parent, + dependencies=[install_deps_in_env_action.Dependency(name=dep['name'], version_or_source=dep['version_or_source'], editable=dep['editable']) for dep in 
dependencies_by_env[env.name]] + ) task = tg.create_task( self.action_runner.run_action( - name="install_deps_in_env", - payload={ - "env_name": env.name, - "venv_dir_path": env.venv_dir_path, - "project_dir_path": env.project_def_path.parent, - "dependencies": dependencies_by_env[env.name], - }, + action=install_deps_in_env_action_instance, + payload=install_deps_payload, + meta=run_meta ) ) install_deps_tasks.append(task) @@ -88,11 +91,9 @@ async def run( raise code_action.StopActionRunWithResult(result=result) install_deps_results = [task.result() for task in install_deps_tasks] - errors: list[str] = list( - itertools.chain.from_iterable( - [result["errors"] for result in install_deps_results] - ) - ) + errors: list[str] = [] + for result in install_deps_results: + errors += result.errors result = prepare_runners_action.PrepareRunnersRunResult(errors=errors) return result diff --git a/finecode_builtin_handlers/src/finecode_builtin_handlers/prepare_runners_read_configs.py b/finecode_builtin_handlers/src/finecode_builtin_handlers/prepare_runners_read_configs.py index 7d77448..e54b7f1 100644 --- a/finecode_builtin_handlers/src/finecode_builtin_handlers/prepare_runners_read_configs.py +++ b/finecode_builtin_handlers/src/finecode_builtin_handlers/prepare_runners_read_configs.py @@ -6,7 +6,6 @@ from finecode_extension_api import code_action from finecode_extension_api.actions import prepare_runners as prepare_runners_action from finecode_extension_api.interfaces import ( - iactionrunner, ilogger, iprojectinfoprovider, ) @@ -25,11 +24,9 @@ class PrepareRunnersReadConfigsHandler( ): def __init__( self, - action_runner: iactionrunner.IActionRunner, project_info_provider: iprojectinfoprovider.IProjectInfoProvider, logger: ilogger.ILogger, ) -> None: - self.action_runner = action_runner self.project_info_provider = project_info_provider self.logger = logger diff --git a/finecode_dev_common_preset/src/finecode_dev_common_preset/preset.toml 
b/finecode_dev_common_preset/src/finecode_dev_common_preset/preset.toml index 1799cf3..b05e606 100644 --- a/finecode_dev_common_preset/src/finecode_dev_common_preset/preset.toml +++ b/finecode_dev_common_preset/src/finecode_dev_common_preset/preset.toml @@ -17,6 +17,8 @@ finecode_extension_runner = { path = "../../../finecode_extension_runner", edita [tool.finecode.env.dev_no_runtime.dependencies] finecode_extension_api = { path = "../../../finecode_extension_api", editable = true } finecode_extension_runner = { path = "../../../finecode_extension_runner", editable = true } +finecode_builtin_handlers = { path = "../../../finecode_builtin_handlers", editable = true } +fine_python_ast = { path = "../../../extensions/fine_python_ast", editable = true } fine_python_mypy = { path = "../../../extensions/fine_python_mypy", editable = true } fine_python_ruff = { path = "../../../extensions/fine_python_ruff", editable = true } fine_python_flake8 = { path = "../../../extensions/fine_python_flake8", editable = true } @@ -31,9 +33,9 @@ build-backend = "setuptools.build_meta" # TODO: recognize minimal python version automatically [[tool.finecode.action_handler]] -source = "fine_python_ruff.RuffLintHandler" +source = "fine_python_ruff.RuffLintFilesHandler" config.target_version = 'py311' [[tool.finecode.action_handler]] -source = "fine_python_pyrefly.PyreflyLintHandler" +source = "fine_python_pyrefly.PyreflyLintFilesHandler" config.python_version = '3.11' diff --git a/finecode_extension_api/src/finecode_extension_api/actions/clean_finecode_logs.py b/finecode_extension_api/src/finecode_extension_api/actions/clean_finecode_logs.py new file mode 100644 index 0000000..ddcbcf5 --- /dev/null +++ b/finecode_extension_api/src/finecode_extension_api/actions/clean_finecode_logs.py @@ -0,0 +1,38 @@ +import dataclasses + +from finecode_extension_api import code_action, textstyler + + +@dataclasses.dataclass +class CleanFinecodeLogsRunPayload(code_action.RunActionPayload): + ... 
+ + +class CleanFinecodeLogsRunContext(code_action.RunActionContext[CleanFinecodeLogsRunPayload]): + ... + + +@dataclasses.dataclass +class CleanFinecodeLogsRunResult(code_action.RunActionResult): + errors: list[str] + + def update(self, other: code_action.RunActionResult) -> None: + if not isinstance(other, CleanFinecodeLogsRunResult): + return + self.errors += other.errors + + def to_text(self) -> str | textstyler.StyledText: + return "\n".join(self.errors) + + @property + def return_code(self) -> code_action.RunReturnCode: + if len(self.errors) == 0: + return code_action.RunReturnCode.SUCCESS + else: + return code_action.RunReturnCode.ERROR + + +class CleanFinecodeLogsAction(code_action.Action[CleanFinecodeLogsRunPayload, CleanFinecodeLogsRunContext, CleanFinecodeLogsRunResult]): + PAYLOAD_TYPE = CleanFinecodeLogsRunPayload + RUN_CONTEXT_TYPE = CleanFinecodeLogsRunContext + RESULT_TYPE = CleanFinecodeLogsRunResult diff --git a/finecode_extension_api/src/finecode_extension_api/actions/dump_config.py b/finecode_extension_api/src/finecode_extension_api/actions/dump_config.py index 4cfa428..e603640 100644 --- a/finecode_extension_api/src/finecode_extension_api/actions/dump_config.py +++ b/finecode_extension_api/src/finecode_extension_api/actions/dump_config.py @@ -22,17 +22,19 @@ class DumpConfigRunPayload(code_action.RunActionPayload): target_file_path: pathlib.Path -class DumpConfigRunContext(code_action.RunActionContext): +class DumpConfigRunContext(code_action.RunActionContext[DumpConfigRunPayload]): def __init__( self, run_id: int, + initial_payload: DumpConfigRunPayload, + meta: code_action.RunActionMeta ) -> None: - super().__init__(run_id=run_id) + super().__init__(run_id=run_id, initial_payload=initial_payload, meta=meta) self.raw_config_dump: dict[str, typing.Any] = {} - async def init(self, initial_payload: DumpConfigRunPayload) -> None: - self.raw_config_dump = initial_payload.project_raw_config + async def init(self) -> None: + self.raw_config_dump = 
self.initial_payload.project_raw_config @dataclasses.dataclass @@ -51,7 +53,7 @@ def to_text(self) -> str | textstyler.StyledText: return formatted_dump_str -class DumpConfigAction(code_action.Action): +class DumpConfigAction(code_action.Action[DumpConfigRunPayload, DumpConfigRunContext, DumpConfigRunResult]): PAYLOAD_TYPE = DumpConfigRunPayload RUN_CONTEXT_TYPE = DumpConfigRunContext RESULT_TYPE = DumpConfigRunResult diff --git a/finecode_extension_api/src/finecode_extension_api/actions/format.py b/finecode_extension_api/src/finecode_extension_api/actions/format.py index fa251f0..fae90d6 100644 --- a/finecode_extension_api/src/finecode_extension_api/actions/format.py +++ b/finecode_extension_api/src/finecode_extension_api/actions/format.py @@ -1,127 +1,39 @@ +import enum import dataclasses import sys from pathlib import Path -from typing import NamedTuple - -from finecode_extension_api.interfaces import ifilemanager if sys.version_info >= (3, 12): from typing import override else: from typing_extensions import override -from finecode_extension_api import code_action, textstyler - - -@dataclasses.dataclass -class FormatRunPayload(code_action.RunActionPayload): - file_paths: list[Path] - save: bool - +from finecode_extension_api import code_action +from finecode_extension_api.actions import format_files as format_files_action -class FileInfo(NamedTuple): - file_content: str - file_version: str - -class FormatRunContext(code_action.RunActionContext): - def __init__( - self, - run_id: int, - file_manager: ifilemanager.IFileManager, - ) -> None: - super().__init__(run_id=run_id) - self.file_manager = file_manager - - self.file_info_by_path: dict[Path, FileInfo] = {} - - async def init(self, initial_payload: FormatRunPayload) -> None: - for file_path in initial_payload.file_paths: - file_content = await self.file_manager.get_content(file_path) - file_version = await self.file_manager.get_file_version(file_path) - self.file_info_by_path[file_path] = FileInfo( - 
file_content=file_content, file_version=file_version - ) - - -@dataclasses.dataclass -class FormatRunFileResult: - changed: bool - # changed code or empty string if code was not changed - code: str +class FormatTarget(enum.StrEnum): + PROJECT = 'project' + FILES = 'files' @dataclasses.dataclass -class FormatRunResult(code_action.RunActionResult): - result_by_file_path: dict[Path, FormatRunFileResult] - - @override - def update(self, other: code_action.RunActionResult) -> None: - if not isinstance(other, FormatRunResult): - return - - for file_path, other_result in other.result_by_file_path.items(): - if other_result.changed is True: - self.result_by_file_path[file_path] = other_result +class FormatRunPayload(code_action.RunActionPayload): + save: bool = True + target: FormatTarget = FormatTarget.PROJECT + # optional, expected only with `target == FormatTarget.FILES` + file_paths: list[Path] = dataclasses.field(default_factory=list) - def to_text(self) -> str | textstyler.StyledText: - text: textstyler.StyledText = textstyler.StyledText() - unchanged_counter: int = 0 - for file_path, file_result in self.result_by_file_path.items(): - if file_result.changed: - text.append("reformatted ") - text.append_styled(file_path, bold=True) - text.append("\n") - else: - unchanged_counter += 1 - text.append_styled( - f"{unchanged_counter} files", foreground=textstyler.Color.BLUE - ) - text.append(" unchanged.") +class FormatRunContext(code_action.RunActionContext[FormatRunPayload]): + ... - return text +@dataclasses.dataclass +class FormatRunResult(format_files_action.FormatFilesRunResult): + ... -class FormatAction(code_action.Action): +class FormatAction(code_action.Action[FormatRunPayload, FormatRunContext, FormatRunResult]): PAYLOAD_TYPE = FormatRunPayload RUN_CONTEXT_TYPE = FormatRunContext RESULT_TYPE = FormatRunResult - - -@dataclasses.dataclass -class SaveFormatHandlerConfig(code_action.ActionHandlerConfig): ... 
- - -class SaveFormatHandler( - code_action.ActionHandler[FormatAction, SaveFormatHandlerConfig] -): - def __init__( - self, - file_manager: ifilemanager.IFileManager, - ) -> None: - self.file_manager = file_manager - - async def run( - self, payload: FormatRunPayload, run_context: FormatRunContext - ) -> FormatRunResult: - file_paths = payload.file_paths - save = payload.save - - if save is True: - for file_path in file_paths: - file_content = run_context.file_info_by_path[file_path].file_content - await self.file_manager.save_file( - file_path=file_path, file_content=file_content - ) - - result = FormatRunResult( - result_by_file_path={ - file_path: FormatRunFileResult( - changed=False, - code=run_context.file_info_by_path[file_path].file_content, - ) - for file_path in file_paths - } - ) - return result diff --git a/finecode_extension_api/src/finecode_extension_api/actions/format_files.py b/finecode_extension_api/src/finecode_extension_api/actions/format_files.py new file mode 100644 index 0000000..df2550d --- /dev/null +++ b/finecode_extension_api/src/finecode_extension_api/actions/format_files.py @@ -0,0 +1,137 @@ +import dataclasses +import sys +from pathlib import Path +from typing import NamedTuple + +from finecode_extension_api.interfaces import ifileeditor + +if sys.version_info >= (3, 12): + from typing import override +else: + from typing_extensions import override + +from finecode_extension_api import code_action, textstyler + + +@dataclasses.dataclass +class FormatFilesRunPayload(code_action.RunActionPayload): + file_paths: list[Path] + save: bool + + +class FileInfo(NamedTuple): + file_content: str + file_version: str + + +FILE_OPERATION_AUTHOR = ifileeditor.FileOperationAuthor(id='FormatFilesAction') + +class FormatFilesRunContext(code_action.RunActionContext[FormatFilesRunPayload]): + def __init__( + self, + run_id: int, + initial_payload: FormatFilesRunPayload, + meta: code_action.RunActionMeta, + file_editor: ifileeditor.IFileEditor + ) -> None: 
+ super().__init__(run_id=run_id, initial_payload=initial_payload, meta=meta) + self.file_editor = file_editor + + self.file_info_by_path: dict[Path, FileInfo] = {} + self.file_editor_session: ifileeditor.IFileEditorSession + + async def init(self) -> None: + self.file_editor_session = await self.exit_stack.enter_async_context(self.file_editor.session(FILE_OPERATION_AUTHOR)) + for file_path in self.initial_payload.file_paths: + file_info = await self.exit_stack.enter_async_context(self.file_editor_session.read_file(file_path, block=True)) + file_content = file_info.content + file_version = file_info.version + self.file_info_by_path[file_path] = FileInfo( + file_content=file_content, file_version=file_version + ) + + +@dataclasses.dataclass +class FormatRunFileResult: + changed: bool + # changed code or empty string if code was not changed + code: str + + +@dataclasses.dataclass +class FormatFilesRunResult(code_action.RunActionResult): + result_by_file_path: dict[Path, FormatRunFileResult] + + @override + def update(self, other: code_action.RunActionResult) -> None: + if not isinstance(other, FormatFilesRunResult): + return + + for file_path, other_result in other.result_by_file_path.items(): + if other_result.changed is True: + self.result_by_file_path[file_path] = other_result + + def to_text(self) -> str | textstyler.StyledText: + text: textstyler.StyledText = textstyler.StyledText() + unchanged_counter: int = 0 + + for file_path, file_result in self.result_by_file_path.items(): + if file_result.changed: + text.append("reformatted ") + text.append_styled(file_path, bold=True) + text.append("\n") + else: + unchanged_counter += 1 + text.append_styled( + f"{unchanged_counter} files", foreground=textstyler.Color.BLUE + ) + text.append(" unchanged.") + + return text + + +class FormatFilesAction(code_action.Action[FormatFilesRunPayload, FormatFilesRunContext, FormatFilesRunResult]): + PAYLOAD_TYPE = FormatFilesRunPayload + RUN_CONTEXT_TYPE = FormatFilesRunContext + 
RESULT_TYPE = FormatFilesRunResult + + +@dataclasses.dataclass +class SaveFormatFilesHandlerConfig(code_action.ActionHandlerConfig): ... + + +class SaveFormatFilesHandler( + code_action.ActionHandler[FormatFilesAction, SaveFormatFilesHandlerConfig] +): + FILE_OPERATION_AUTHOR = ifileeditor.FileOperationAuthor(id='SaveFormatFilesHandler') + + def __init__( + self, + file_editor: ifileeditor.IFileEditor, + ) -> None: + self.file_editor = file_editor + + async def run( + self, payload: FormatFilesRunPayload, run_context: FormatFilesRunContext + ) -> FormatFilesRunResult: + file_paths = payload.file_paths + save = payload.save + + if save is True: + async with self.file_editor.session(self.FILE_OPERATION_AUTHOR) as session: + for file_path in file_paths: + file_content = run_context.file_info_by_path[file_path].file_content + await session.save_file( + file_path=file_path, file_content=file_content + ) + + result = FormatFilesRunResult( + result_by_file_path={ + file_path: FormatRunFileResult( + changed=False, + code=run_context.file_info_by_path[file_path].file_content, + ) + for file_path in file_paths + } + ) + return result diff --git a/finecode_extension_api/src/finecode_extension_api/actions/group_project_files_by_lang.py b/finecode_extension_api/src/finecode_extension_api/actions/group_project_files_by_lang.py new file mode 100644 index 0000000..23bed8a --- /dev/null +++ b/finecode_extension_api/src/finecode_extension_api/actions/group_project_files_by_lang.py @@ -0,0 +1,56 @@ +import dataclasses +import pathlib +import sys + +if sys.version_info >= (3, 12): + from typing import override +else: + from typing_extensions import override + +from finecode_extension_api import code_action, textstyler + + +@dataclasses.dataclass +class GroupProjectFilesByLangRunPayload(code_action.RunActionPayload): + file_paths: list[pathlib.Path] + langs: list[str] | None = None + + +class 
GroupProjectFilesByLangRunContext(code_action.RunActionContext[GroupProjectFilesByLangRunPayload]): + def __init__( + self, + run_id: int, + initial_payload: GroupProjectFilesByLangRunPayload, + meta: code_action.RunActionMeta + ) -> None: + super().__init__(run_id=run_id, initial_payload=initial_payload, meta=meta) + + +@dataclasses.dataclass +class GroupProjectFilesByLangRunResult(code_action.RunActionResult): + files_by_lang: dict[str, list[pathlib.Path]] + + @override + def update(self, other: code_action.RunActionResult) -> None: + if not isinstance(other, GroupProjectFilesByLangRunResult): + return + + for lang, files in other.files_by_lang.items(): + if lang not in self.files_by_lang: + self.files_by_lang[lang] = files + else: + self.files_by_lang[lang] += files + + def to_text(self) -> str | textstyler.StyledText: + formatted_result = textstyler.StyledText() + for language, files in self.files_by_lang.items(): + formatted_result.append_styled(text=language + "\n", bold=True) + for file_path in files: + formatted_result.append(file_path.as_posix() + "\n") + return formatted_result + + +class GroupProjectFilesByLangAction(code_action.Action[GroupProjectFilesByLangRunPayload, GroupProjectFilesByLangRunContext, GroupProjectFilesByLangRunResult]): + PAYLOAD_TYPE = GroupProjectFilesByLangRunPayload + RUN_CONTEXT_TYPE = GroupProjectFilesByLangRunContext + RESULT_TYPE = GroupProjectFilesByLangRunResult diff --git a/finecode_extension_api/src/finecode_extension_api/actions/install_deps_in_env.py b/finecode_extension_api/src/finecode_extension_api/actions/install_deps_in_env.py index e8de2f8..133bca3 100644 --- a/finecode_extension_api/src/finecode_extension_api/actions/install_deps_in_env.py +++ b/finecode_extension_api/src/finecode_extension_api/actions/install_deps_in_env.py @@ -29,8 +29,10 @@ class InstallDepsInEnvRunContext(code_action.RunActionContext): def __init__( self, run_id: int, + initial_payload: InstallDepsInEnvRunPayload, + meta: 
code_action.RunActionMeta ) -> None: - super().__init__(run_id=run_id) + super().__init__(run_id=run_id, initial_payload=initial_payload, meta=meta) @dataclasses.dataclass diff --git a/finecode_extension_api/src/finecode_extension_api/actions/lint.py b/finecode_extension_api/src/finecode_extension_api/actions/lint.py index 39f5510..7651f1b 100644 --- a/finecode_extension_api/src/finecode_extension_api/actions/lint.py +++ b/finecode_extension_api/src/finecode_extension_api/actions/lint.py @@ -1,113 +1,35 @@ -import collections.abc import dataclasses import enum from pathlib import Path -from finecode_extension_api import code_action, textstyler +from finecode_extension_api import code_action +from finecode_extension_api.actions import lint_files -@dataclasses.dataclass -class Position: - line: int - character: int - - -@dataclasses.dataclass -class Range: - start: Position - end: Position - - -class LintMessageSeverity(enum.IntEnum): - # use IntEnum to get json serialization out of the box - ERROR = 1 - WARNING = 2 - INFO = 3 - HINT = 4 +class LintTarget(enum.StrEnum): + PROJECT = 'project' + FILES = 'files' @dataclasses.dataclass -class LintMessage: - range: Range - message: str - code: str | None = None - code_description: str | None = None - source: str | None = None - severity: LintMessageSeverity | None = None +class LintRunPayload(code_action.RunActionPayload): + target: LintTarget = LintTarget.PROJECT + # optional, expected only with `target == LintTarget.FILES` + file_paths: list[Path] = dataclasses.field(default_factory=list) @dataclasses.dataclass -class LintRunPayload(code_action.RunActionPayload, collections.abc.AsyncIterable): - file_paths: list[Path] - - def __aiter__(self) -> collections.abc.AsyncIterator[Path]: - return LintRunPayloadIterator(self) - - -@dataclasses.dataclass -class LintRunPayloadIterator(collections.abc.AsyncIterator): - def __init__(self, lint_run_payload: LintRunPayload): - self.lint_run_payload = lint_run_payload - 
self.current_file_path_index = 0 - - def __aiter__(self): - return self +class LintRunResult(lint_files.LintFilesRunResult): + ... - async def __anext__(self) -> Path: - if len(self.lint_run_payload.file_paths) <= self.current_file_path_index: - raise StopAsyncIteration() - self.current_file_path_index += 1 - return self.lint_run_payload.file_paths[self.current_file_path_index - 1] +LintRunContext = code_action.RunActionWithPartialResultsContext -@dataclasses.dataclass -class LintRunResult(code_action.RunActionResult): - # messages is a dict to support messages for multiple files because it could be the - # case that linter checks given file and its dependencies. - # - # dict key should be Path, but pygls fails to handle slashes in dict keys, use - # strings with posix representation of path instead until the problem is properly - # solved - messages: dict[str, list[LintMessage]] - - def update(self, other: code_action.RunActionResult) -> None: - if not isinstance(other, LintRunResult): - return - - for file_path_str, new_messages in other.messages.items(): - if file_path_str not in self.messages: - self.messages[file_path_str] = [] - self.messages[file_path_str].extend(new_messages) - - def to_text(self) -> str | textstyler.StyledText: - text: textstyler.StyledText = textstyler.StyledText() - for file_path_str, file_messages in self.messages.items(): - if len(file_messages) > 0: - for message in file_messages: - # TODO: relative file path? 
- source_str = "" - if message.source is not None: - source_str = f" ({message.source})" - text.append_styled(file_path_str, bold=True) - text.append(f":{message.range.start.line}") - text.append(f":{message.range.start.character}: ") - text.append_styled(message.code, foreground=textstyler.Color.RED) - text.append(f" {message.message}{source_str}\n") - else: - text.append_styled(file_path_str, bold=True) - text.append(": OK\n") - - return text - - @property - def return_code(self) -> code_action.RunReturnCode: - for lint_messages in self.messages.values(): - if len(lint_messages) > 0: - return code_action.RunReturnCode.ERROR - return code_action.RunReturnCode.SUCCESS - - -class LintAction(code_action.Action): +class LintAction(code_action.Action[LintRunPayload, LintRunContext, LintRunResult]): PAYLOAD_TYPE = LintRunPayload - RUN_CONTEXT_TYPE = code_action.RunActionWithPartialResultsContext + RUN_CONTEXT_TYPE = LintRunContext RESULT_TYPE = LintRunResult + + +# reexport +LintMessage = lint_files.LintMessage diff --git a/finecode_extension_api/src/finecode_extension_api/actions/lint_files.py b/finecode_extension_api/src/finecode_extension_api/actions/lint_files.py new file mode 100644 index 0000000..2a4280d --- /dev/null +++ b/finecode_extension_api/src/finecode_extension_api/actions/lint_files.py @@ -0,0 +1,114 @@ +import collections.abc +import dataclasses +import enum +from pathlib import Path + +from finecode_extension_api import code_action, textstyler + + +@dataclasses.dataclass +class Position: + line: int + character: int + + +@dataclasses.dataclass +class Range: + start: Position + end: Position + + +class LintMessageSeverity(enum.IntEnum): + # use IntEnum to get json serialization out of the box + ERROR = 1 + WARNING = 2 + INFO = 3 + HINT = 4 + + +@dataclasses.dataclass +class LintMessage: + range: Range + message: str + code: str | None = None + code_description: str | None = None + source: str | None = None + severity: LintMessageSeverity | None = None + 
+ +@dataclasses.dataclass +class LintFilesRunPayload(code_action.RunActionPayload, collections.abc.AsyncIterable[Path]): + file_paths: list[Path] + + def __aiter__(self) -> collections.abc.AsyncIterator[Path]: + return LintFilesRunPayloadIterator(self) + + +@dataclasses.dataclass +class LintFilesRunPayloadIterator(collections.abc.AsyncIterator[Path]): + def __init__(self, lint_files_run_payload: LintFilesRunPayload): + self.lint_files_run_payload = lint_files_run_payload + self.current_file_path_index = 0 + + def __aiter__(self): + return self + + async def __anext__(self) -> Path: + if len(self.lint_files_run_payload.file_paths) <= self.current_file_path_index: + raise StopAsyncIteration() + self.current_file_path_index += 1 + return self.lint_files_run_payload.file_paths[self.current_file_path_index - 1] + + +@dataclasses.dataclass +class LintFilesRunResult(code_action.RunActionResult): + # messages is a dict to support messages for multiple files because it could be the + # case that linter checks given file and its dependencies. + # + # dict key should be Path, but pygls fails to handle slashes in dict keys, use + # strings with posix representation of path instead until the problem is properly + # solved + messages: dict[str, list[LintMessage]] + + def update(self, other: code_action.RunActionResult) -> None: + if not isinstance(other, LintFilesRunResult): + return + + for file_path_str, new_messages in other.messages.items(): + if file_path_str not in self.messages: + self.messages[file_path_str] = [] + self.messages[file_path_str].extend(new_messages) + + def to_text(self) -> str | textstyler.StyledText: + text: textstyler.StyledText = textstyler.StyledText() + for file_path_str, file_messages in self.messages.items(): + if len(file_messages) > 0: + for message in file_messages: + # TODO: relative file path? 
+ source_str = "" + if message.source is not None: + source_str = f" ({message.source})" + text.append_styled(file_path_str, bold=True) + text.append(f":{message.range.start.line}") + text.append(f":{message.range.start.character}: ") + if message.code is not None: + text.append_styled(message.code, foreground=textstyler.Color.RED) + text.append(f" {message.message}{source_str}\n") + else: + text.append_styled(file_path_str, bold=True) + text.append(": OK\n") + + return text + + @property + def return_code(self) -> code_action.RunReturnCode: + for lint_messages in self.messages.values(): + if len(lint_messages) > 0: + return code_action.RunReturnCode.ERROR + return code_action.RunReturnCode.SUCCESS + + +class LintFilesAction(code_action.Action[LintFilesRunPayload, code_action.RunActionWithPartialResultsContext, LintFilesRunResult]): + PAYLOAD_TYPE = LintFilesRunPayload + RUN_CONTEXT_TYPE = code_action.RunActionWithPartialResultsContext + RESULT_TYPE = LintFilesRunResult diff --git a/finecode_extension_api/src/finecode_extension_api/actions/list_project_files_by_lang.py b/finecode_extension_api/src/finecode_extension_api/actions/list_project_files_by_lang.py index f7f7a80..9e61490 100644 --- a/finecode_extension_api/src/finecode_extension_api/actions/list_project_files_by_lang.py +++ b/finecode_extension_api/src/finecode_extension_api/actions/list_project_files_by_lang.py @@ -11,15 +11,18 @@ @dataclasses.dataclass -class ListProjectFilesByLangRunPayload(code_action.RunActionPayload): ... 
+class ListProjectFilesByLangRunPayload(code_action.RunActionPayload): + langs: list[str] | None = None class ListProjectFilesByLangRunContext(code_action.RunActionContext): def __init__( self, run_id: int, + initial_payload: ListProjectFilesByLangRunPayload, + meta: code_action.RunActionMeta ) -> None: - super().__init__(run_id=run_id) + super().__init__(run_id=run_id, initial_payload=initial_payload, meta=meta) @dataclasses.dataclass @@ -46,7 +49,7 @@ def to_text(self) -> str | textstyler.StyledText: return formatted_result -class ListProjectFilesByLangAction(code_action.Action): +class ListProjectFilesByLangAction(code_action.Action[ListProjectFilesByLangRunPayload, ListProjectFilesByLangRunContext, ListProjectFilesByLangRunResult]): PAYLOAD_TYPE = ListProjectFilesByLangRunPayload RUN_CONTEXT_TYPE = ListProjectFilesByLangRunContext RESULT_TYPE = ListProjectFilesByLangRunResult diff --git a/finecode_extension_api/src/finecode_extension_api/actions/prepare_envs.py b/finecode_extension_api/src/finecode_extension_api/actions/prepare_envs.py index 51ed001..3ca859a 100644 --- a/finecode_extension_api/src/finecode_extension_api/actions/prepare_envs.py +++ b/finecode_extension_api/src/finecode_extension_api/actions/prepare_envs.py @@ -28,12 +28,14 @@ class PrepareEnvsRunPayload(code_action.RunActionPayload): recreate: bool = False -class PrepareEnvsRunContext(code_action.RunActionContext): +class PrepareEnvsRunContext(code_action.RunActionContext[PrepareEnvsRunPayload]): def __init__( self, run_id: int, + initial_payload: PrepareEnvsRunPayload, + meta: code_action.RunActionMeta ) -> None: - super().__init__(run_id=run_id) + super().__init__(run_id=run_id, initial_payload=initial_payload, meta=meta) # project def pathes are stored also in context, because prepare envs can run # tools like pip which expected 'normalized' project definition(=without @@ -50,8 +52,8 @@ def __init__( pathlib.Path, dict[str, typing.Any] ] = {} - async def init(self, initial_payload: 
PrepareEnvsRunPayload) -> None: - for env_info in initial_payload.envs: + async def init(self) -> None: + for env_info in self.initial_payload.envs: self.project_def_path_by_venv_dir_path[env_info.venv_dir_path] = ( env_info.project_def_path ) @@ -79,7 +81,7 @@ def return_code(self) -> code_action.RunReturnCode: return code_action.RunReturnCode.ERROR -class PrepareEnvsAction(code_action.Action): +class PrepareEnvsAction(code_action.Action[PrepareEnvsRunPayload, PrepareEnvsRunContext, PrepareEnvsRunResult]): PAYLOAD_TYPE = PrepareEnvsRunPayload RUN_CONTEXT_TYPE = PrepareEnvsRunContext RESULT_TYPE = PrepareEnvsRunResult diff --git a/finecode_extension_api/src/finecode_extension_api/actions/prepare_runners.py b/finecode_extension_api/src/finecode_extension_api/actions/prepare_runners.py index a7c0329..9ba4a15 100644 --- a/finecode_extension_api/src/finecode_extension_api/actions/prepare_runners.py +++ b/finecode_extension_api/src/finecode_extension_api/actions/prepare_runners.py @@ -28,12 +28,14 @@ class PrepareRunnersRunPayload(code_action.RunActionPayload): recreate: bool = False -class PrepareRunnersRunContext(code_action.RunActionContext): +class PrepareRunnersRunContext(code_action.RunActionContext[PrepareRunnersRunPayload]): def __init__( self, run_id: int, + initial_payload: PrepareRunnersRunPayload, + meta: code_action.RunActionMeta ) -> None: - super().__init__(run_id=run_id) + super().__init__(run_id=run_id, initial_payload=initial_payload, meta=meta) # project def pathes are stored also in context, because prepare envs can run # tools like pip which expected 'normalized' project definition(=without @@ -50,8 +52,8 @@ def __init__( pathlib.Path, dict[str, typing.Any] ] = {} - async def init(self, initial_payload: PrepareRunnersRunPayload) -> None: - for env_info in initial_payload.envs: + async def init(self) -> None: + for env_info in self.initial_payload.envs: self.project_def_path_by_venv_dir_path[env_info.venv_dir_path] = ( env_info.project_def_path ) @@ 
-79,7 +81,7 @@ def return_code(self) -> code_action.RunReturnCode: return code_action.RunReturnCode.ERROR -class PrepareRunnersAction(code_action.Action): +class PrepareRunnersAction(code_action.Action[PrepareRunnersRunPayload, PrepareRunnersRunContext, PrepareRunnersRunResult]): PAYLOAD_TYPE = PrepareRunnersRunPayload RUN_CONTEXT_TYPE = PrepareRunnersRunContext RESULT_TYPE = PrepareRunnersRunResult diff --git a/finecode_extension_api/src/finecode_extension_api/code_action.py b/finecode_extension_api/src/finecode_extension_api/code_action.py index cde49bf..c30649b 100644 --- a/finecode_extension_api/src/finecode_extension_api/code_action.py +++ b/finecode_extension_api/src/finecode_extension_api/code_action.py @@ -2,6 +2,7 @@ import asyncio import collections.abc +import contextlib import dataclasses import enum import typing @@ -18,6 +19,26 @@ class ActionHandlerConfig: ... class RunActionPayload: ... +class RunActionTrigger(enum.StrEnum): + USER = 'user' + SYSTEM = 'system' + UNKNOWN = 'unknown' + + +class DevEnv(enum.StrEnum): + IDE = 'ide' + CLI = 'cli' + AI = 'ai' + PRECOMMIT = 'precommit' + CI_CD = 'cicd' + + +@dataclasses.dataclass +class RunActionMeta: + trigger: RunActionTrigger + dev_env: DevEnv + + class RunReturnCode(enum.IntEnum): SUCCESS = 0 ERROR = 1 @@ -50,25 +71,40 @@ def return_code(self) -> RunReturnCode: ) -class RunActionContext: +class RunActionContext(typing.Generic[RunPayloadType]): # data object to save data between action steps(only during one run, after run data # is removed). Keep it simple, without business logic, just data storage, but you # still may initialize values in constructor using dependency injection if needed # to avoid handling in action cases when run context is not initialized and is # initialized already. 
- def __init__(self, run_id: int) -> None: + def __init__(self, run_id: int, initial_payload: RunPayloadType, meta: RunActionMeta) -> None: self.run_id = run_id + self.initial_payload = initial_payload + self.meta = meta + self.exit_stack = contextlib.AsyncExitStack() + + async def init(self) -> None: + ... + + async def __aenter__(self): + await self.exit_stack.__aenter__() + + await self.init() + + return self + + async def __aexit__(self, exc_type, exc_val, exc_tb): + return await self.exit_stack.__aexit__(exc_type, exc_val, exc_tb) - async def init(self, initial_payload: RunPayloadType) -> None: ... RunContextType = TypeVar("RunContextType", bound=RunActionContext) class RunActionWithPartialResultsContext(RunActionContext): - def __init__(self, run_id: int) -> None: - super().__init__(run_id=run_id) + def __init__(self, run_id: int, initial_payload: RunPayloadType, meta: RunActionMeta) -> None: + super().__init__(run_id=run_id, initial_payload=initial_payload, meta=meta) self.partial_result_scheduler = partialresultscheduler.PartialResultScheduler() @@ -78,10 +114,10 @@ class ActionConfig: class Action(Generic[RunPayloadType, RunContextType, RunResultType]): - PAYLOAD_TYPE: typing.Type[RunActionPayload] = RunActionPayload - RUN_CONTEXT_TYPE: typing.Type[RunActionContext] = RunActionContext - RESULT_TYPE: typing.Type[RunActionResult] = RunActionResult - CONFIG_TYPE: typing.Type[ActionConfig] = ActionConfig + PAYLOAD_TYPE: type[RunActionPayload] = RunActionPayload + RUN_CONTEXT_TYPE: type[RunActionContext] = RunActionContext + RESULT_TYPE: type[RunActionResult] = RunActionResult + CONFIG_TYPE: type[ActionConfig] = ActionConfig class StopActionRunWithResult(Exception): @@ -151,6 +187,3 @@ async def run( | collections.abc.Mapping[IterableType, asyncio.Task[RunResultType]] ): raise NotImplementedError() - - async def stop(self): - raise NotImplementedError() diff --git a/finecode_extension_api/src/finecode_extension_api/interfaces/iactionrunner.py 
b/finecode_extension_api/src/finecode_extension_api/interfaces/iactionrunner.py index 8ae5e10..22a825d 100644 --- a/finecode_extension_api/src/finecode_extension_api/interfaces/iactionrunner.py +++ b/finecode_extension_api/src/finecode_extension_api/interfaces/iactionrunner.py @@ -1,10 +1,19 @@ -from typing import Any, Protocol +import typing +from finecode_extension_api import code_action, service -class IActionRunner(Protocol): + + +class IActionRunner(service.Service, typing.Protocol): async def run_action( - self, name: str, payload: dict[str, Any] - ) -> dict[str, Any]: ... + self, action: type[code_action.Action[code_action.RunPayloadType, code_action.RunContextType, code_action.RunResultType]], payload: code_action.RunPayloadType, meta: code_action.RunActionMeta + ) -> code_action.RunResultType: ... + + def get_actions_names(self) -> list[str]: + ... + + def get_action_by_name(self, name: str) -> type[code_action.Action[code_action.RunPayloadType, code_action.RunContextType, code_action.RunResultType]]: + ... class BaseRunActionException(Exception): diff --git a/finecode_extension_api/src/finecode_extension_api/interfaces/icommandrunner.py b/finecode_extension_api/src/finecode_extension_api/interfaces/icommandrunner.py index e762992..25c11f0 100644 --- a/finecode_extension_api/src/finecode_extension_api/interfaces/icommandrunner.py +++ b/finecode_extension_api/src/finecode_extension_api/interfaces/icommandrunner.py @@ -14,11 +14,11 @@ def write_to_stdin(self, value: str) -> None: ... def close_stdin(self) -> None: ... -class ISyncProcess(IProcess): +class ISyncProcess(IProcess, Protocol): def wait_for_end(self, timeout: float | None = None) -> None: ... -class IAsyncProcess(IProcess): +class IAsyncProcess(IProcess, Protocol): async def wait_for_end(self, timeout: float | None = None) -> None: ... 
diff --git a/finecode_extension_api/src/finecode_extension_api/interfaces/iextensionrunnerinfoprovider.py b/finecode_extension_api/src/finecode_extension_api/interfaces/iextensionrunnerinfoprovider.py index 7bed5e3..953a314 100644 --- a/finecode_extension_api/src/finecode_extension_api/interfaces/iextensionrunnerinfoprovider.py +++ b/finecode_extension_api/src/finecode_extension_api/interfaces/iextensionrunnerinfoprovider.py @@ -3,10 +3,14 @@ class IExtensionRunnerInfoProvider(Protocol): + def get_current_env_name(self) -> str: ... + def get_cache_dir_path(self) -> pathlib.Path: ... def get_venv_dir_path_of_env(self, env_name: str) -> pathlib.Path: ... + def get_current_venv_dir_path(self) -> pathlib.Path: ... + def get_venv_site_packages( self, venv_dir_path: pathlib.Path ) -> list[pathlib.Path]: ... diff --git a/finecode_extension_api/src/finecode_extension_api/interfaces/ifileeditor.py b/finecode_extension_api/src/finecode_extension_api/interfaces/ifileeditor.py new file mode 100644 index 0000000..58513da --- /dev/null +++ b/finecode_extension_api/src/finecode_extension_api/interfaces/ifileeditor.py @@ -0,0 +1,121 @@ +import typing +import dataclasses +import collections.abc +import contextlib +import pathlib +from typing import Protocol + +from finecode_extension_api import common_types + +# reexport +Position = common_types.Position +Range = common_types.Range + + +@dataclasses.dataclass +class FileInfo: + content: str + version: str + + +@dataclasses.dataclass +class FileChangePartial: + """The range of the document that changed.""" + range: Range + """The new text for the provided range.""" + text: str + + +@dataclasses.dataclass +class FileChangeFull: + # new file content + text: str + + +FileChange = FileChangePartial | FileChangeFull + + +@dataclasses.dataclass +class FileOperationAuthor: + id: str + + +@dataclasses.dataclass +class FileChangeEvent: + file_path: pathlib.Path + author: FileOperationAuthor + change: FileChange + + +class 
FileAlreadyOpenError(Exception): + """Raised when trying to open a file that's already open in the session.""" + def __init__(self, message: str) -> None: + self.message = message + + +class IFileEditorSession(Protocol): + # Reasons for using sessions: + # - all operations should be authored to provide tracebility + # - some operations are author-specific, e.g. subscribe to changes of all opened by + # author files + async def change_file(self, file_path: pathlib.Path, change: FileChange) -> None: + ... + + @contextlib.asynccontextmanager + async def subscribe_to_changes_of_opened_files(self) -> collections.abc.AsyncIterator[FileChangeEvent]: + # TODO: bunch of change events at once? + ... + + async def open_file(self, file_path: pathlib.Path) -> None: + ... + + async def save_opened_file(self, file_path: pathlib.Path) -> None: + ... + + async def close_file(self, file_path: pathlib.Path) -> None: + ... + + @contextlib.asynccontextmanager + async def read_file(self, file_path: pathlib.Path, block: bool = False) -> collections.abc.AsyncIterator[FileInfo]: + ... + + async def read_file_version(self, file_path: pathlib.Path) -> str: + # in case only file version is needed without content + ... + + async def save_file(self, file_path: pathlib.Path, file_content: str) -> None: + ... + + # TODO + # async def reread_file() + + +class IFileEditor(Protocol): + """Service for managing read/write access to the files, e.g: + - read only for reading (other can read as well) (e.g. linter) + - read for modyfing and block until modification is done (e.g. code formatter) + - read for modyfing without blocking (e.g. by IDE) + + IDE needs possibility to subscribe on changes to sync. + IDE: + - user opens a file in IDE -> IDE sends 'open_file' and subscribes to changes, did by other + - user edits the file in IDE -> IDE sends 'file_changed' with changes to FineCode. 
All subscribers get the changes + -> file change should have an author + - user saves the file in IDE -> IDE sends 'file_modified_on_disk' || TODO: distinguish saved file and not saved? or just keep opened? + - user closes the file in IDE -> IDE sends 'close_file' and unsubscribes from changes + + All tools access files via `ifileeditor.IFileEditor`, which stores the current(also not saved) content of the file. + + Reading/writing files: use always `ifileeditor.IFileEditor` to read and write files. It will check whether file is opened + and opened content should be modified or file is not opened and it can be modified directly on disk. + + 'opened files' ... files user sees and works with, not files which tools read + """ + @contextlib.asynccontextmanager + async def session(self, author: FileOperationAuthor) -> typing.AsyncContextManager[IFileEditorSession]: + """Create a session for a specific author.""" + ... + + def get_opened_files(self) -> list[pathlib.Path]: + # opened files from all sessions + ... diff --git a/finecode_extension_api/src/finecode_extension_api/interfaces/ifilemanager.py b/finecode_extension_api/src/finecode_extension_api/interfaces/ifilemanager.py index 9b21f6a..21ba5bd 100644 --- a/finecode_extension_api/src/finecode_extension_api/interfaces/ifilemanager.py +++ b/finecode_extension_api/src/finecode_extension_api/interfaces/ifilemanager.py @@ -3,13 +3,19 @@ class IFileManager(Protocol): + """Service for file system access: list files, create/read/write/delete files and + directories. + + Its main purpose is to abstract file storage(local, remote, file system etc). + Additional functionalities such as management of opened files etc are not part of + this service. + """ async def get_content(self, file_path: Path) -> str: ... async def get_file_version(self, file_path: Path) -> str: - # TODO: move file versioning to cache ... - async def save_file(self, file_path: Path, file_content) -> None: ... 
+ async def save_file(self, file_path: Path, file_content: str) -> None: ... async def create_dir( self, dir_path: Path, create_parents: bool = True, exist_ok: bool = True diff --git a/finecode_extension_api/src/finecode_extension_api/interfaces/iprojectinfoprovider.py b/finecode_extension_api/src/finecode_extension_api/interfaces/iprojectinfoprovider.py index f978b3f..004451f 100644 --- a/finecode_extension_api/src/finecode_extension_api/interfaces/iprojectinfoprovider.py +++ b/finecode_extension_api/src/finecode_extension_api/interfaces/iprojectinfoprovider.py @@ -15,6 +15,8 @@ async def get_project_raw_config( async def get_current_project_raw_config(self) -> dict[str, Any]: ... + def get_current_project_raw_config_version(self) -> int: + ... class InvalidProjectConfig(Exception): def __init__(self, message: str) -> None: diff --git a/finecode_extension_runner/pyproject.toml b/finecode_extension_runner/pyproject.toml index 114e5e1..9dcc324 100644 --- a/finecode_extension_runner/pyproject.toml +++ b/finecode_extension_runner/pyproject.toml @@ -9,9 +9,10 @@ dependencies = [ "loguru==0.7.*", "click==8.1.*", "pydantic==2.11.*", - "pygls==2.0.0-a6", + "pygls==2.0.0", "finecode_extension_api==0.3.*", "deepmerge==2.0.*", + "debugpy==1.8.*", ] [dependency-groups] @@ -29,6 +30,10 @@ finecode_dev_common_preset = { path = "../finecode_dev_common_preset", editable finecode = { path = "../", editable = true } finecode_extension_runner = { path = "../finecode_extension_runner", editable = true } finecode_extension_api = { path = "../finecode_extension_api", editable = true } +finecode_jsonrpc = { path = "../finecode_jsonrpc", editable = true } +finecode_builtin_handlers = { path = "../finecode_builtin_handlers", editable = true } +fine_python_lint = { path = "../presets/fine_python_lint", editable = true } +fine_python_format = { path = "../presets/fine_python_format", editable = true } [build-system] requires = ["setuptools>=64", "setuptools-scm>=8"] @@ -54,3 +59,6 @@ layers 
= [ [tool.setuptools_scm] version_file = "src/finecode_extension_runner/_version.py" root = ".." + +[tool.finecode.env.dev_no_runtime] +# runner.debug = true diff --git a/finecode_extension_runner/src/finecode_extension_runner/__main__.py b/finecode_extension_runner/src/finecode_extension_runner/__main__.py index 2149a65..23a88a5 100644 --- a/finecode_extension_runner/src/finecode_extension_runner/__main__.py +++ b/finecode_extension_runner/src/finecode_extension_runner/__main__.py @@ -1,4 +1,5 @@ from finecode_extension_runner import cli + if __name__ == "__main__": cli.main() diff --git a/finecode_extension_runner/src/finecode_extension_runner/_services/run_action.py b/finecode_extension_runner/src/finecode_extension_runner/_services/run_action.py index 34f285e..ec649f1 100644 --- a/finecode_extension_runner/src/finecode_extension_runner/_services/run_action.py +++ b/finecode_extension_runner/src/finecode_extension_runner/_services/run_action.py @@ -9,7 +9,7 @@ from loguru import logger from pydantic.dataclasses import dataclass as pydantic_dataclass -from finecode_extension_api import code_action, textstyler +from finecode_extension_api import code_action, textstyler, service from finecode_extension_api.interfaces import iactionrunner from finecode_extension_runner import context, domain, global_state from finecode_extension_runner import ( @@ -46,33 +46,20 @@ def set_partial_result_sender(send_func: typing.Callable) -> None: ) -async def run_action( - request: schemas.RunActionRequest, options: schemas.RunActionOptions -) -> schemas.RunActionResponse: - global last_run_id - run_id = last_run_id - last_run_id += 1 - logger.trace( - f"Run action '{request.action_name}', run id: {run_id}, partial result token: {options.partial_result_token}" - ) - # TODO: check whether config is set: this will be solved by passing initial - # configuration as payload of initialize - if global_state.runner_context is None: - raise ActionFailedException( - "Run of action failed 
because extension runner is not initialized yet" - ) +class AsyncPlaceholderContext: + async def __aenter__(self): + return self - start_time = time.time_ns() - project_def = global_state.runner_context.project + async def __aexit__(self, exc_type, exc_val, exc_tb): ... - try: - action = project_def.actions[request.action_name] - except KeyError: - logger.error(f"R{run_id} | Action {request.action_name} not found") - raise ActionFailedException( - f"R{run_id} | Action {request.action_name} not found" - ) +async def run_action( + action_def: domain.Action, + payload: code_action.RunActionPayload | None, + meta: code_action.RunActionMeta, + partial_result_token: int | str | None = None, + run_id: int | None = None, +) -> code_action.RunActionResult | None: # design decisions: # - keep payload unchanged between all subaction runs. # For intermediate data use run_context @@ -81,44 +68,66 @@ async def run_action( # returned. (experimental) # - execution of handlers can be concurrent or sequential. But executions of handler # on iterable payloads(single parts) are always concurrent. 
- action_name = request.action_name + + if run_id is None: + global last_run_id + run_id = last_run_id + last_run_id += 1 + + logger.trace( + f"Run action '{action_def.name}', run id: {run_id}, partial result token: {partial_result_token}" + ) + + # TODO: check whether config is set: this will be solved by passing initial + # configuration as payload of initialize + if global_state.runner_context is None: + raise ActionFailedException( + "Run of action failed because extension runner is not initialized yet" + ) + + start_time = time.time_ns() try: - action_cache = global_state.runner_context.action_cache_by_name[action_name] + action_cache = global_state.runner_context.action_cache_by_name[action_def.name] except KeyError: action_cache = domain.ActionCache() - global_state.runner_context.action_cache_by_name[action_name] = action_cache + global_state.runner_context.action_cache_by_name[action_def.name] = action_cache if action_cache.exec_info is not None: action_exec_info = action_cache.exec_info else: - action_exec_info = create_action_exec_info(action) + action_exec_info = create_action_exec_info(action_def) action_cache.exec_info = action_exec_info - # TODO: catch validation errors - payload: code_action.RunActionPayload | None = None - if action_exec_info.payload_type is not None: - payload_type_with_validation = pydantic_dataclass(action_exec_info.payload_type) - payload = payload_type_with_validation(**request.params) - - run_context: code_action.RunActionContext | None = None + run_context: code_action.RunActionContext | AsyncPlaceholderContext if action_exec_info.run_context_type is not None: constructor_args = await resolve_func_args_with_di( action_exec_info.run_context_type.__init__, - known_args={"run_id": lambda _: run_id}, + known_args={ + "run_id": lambda _: run_id, + "initial_payload": lambda _: payload, + "meta": lambda _: meta, + }, params_to_ignore=["self"], ) - run_context = action_exec_info.run_context_type(**constructor_args) - # TODO: handler 
errors - await run_context.init(initial_payload=payload) + # developers can change run context constructor, handle all exceptions + try: + run_context = action_exec_info.run_context_type(**constructor_args) + except Exception as exception: + raise ActionFailedException( + f"Failed to instantiate run context of action {action_def.name}(Run {run_id}): {str(exception)}." + + " See ER logs for more details" + ) from exception + else: + # TODO: check run_context below, whether AsyncPlaceholder can really be used + run_context = AsyncPlaceholderContext() action_result: code_action.RunActionResult | None = None runner_context = global_state.runner_context # TODO: take value from action config - execute_handlers_concurrently = action.name == "lint" - partial_result_token = options.partial_result_token + execute_handlers_concurrently = action_def.name.startswith("lint_files_") send_partial_results = partial_result_token is not None with action_exec_info.process_executor.activate(): # action payload can be iterable or not @@ -129,7 +138,7 @@ async def run_action( logger.trace( f"R{run_id} | Iterable payload, execute all handlers to schedule coros" ) - for handler in action.handlers: + for handler in action_def.handlers: await execute_action_handler( handler=handler, payload=payload, @@ -166,7 +175,7 @@ async def run_action( send_partial_results, partial_result_token, partial_result_sender, - action.name, + action_def.name, run_id, ) else: @@ -175,7 +184,7 @@ async def run_action( send_partial_results, partial_result_token, partial_result_sender, - action.name, + action_def.name, run_id, ) subresult_task = tg.create_task(coro) @@ -189,7 +198,7 @@ async def run_action( else: errors.append(exc.message) raise ActionFailedException( - f"Running action handlers of '{action.name}' failed(Run {run_id}): {errors}." + f"Running action handlers of '{action_def.name}' failed(Run {run_id}): {errors}." 
" See ER logs for more details" ) @@ -211,7 +220,7 @@ async def run_action( handlers_tasks: list[asyncio.Task] = [] try: async with asyncio.TaskGroup() as tg: - for handler in action.handlers: + for handler in action_def.handlers: handler_task = tg.create_task( execute_action_handler( handler=handler, @@ -229,7 +238,7 @@ async def run_action( # TODO: expected / unexpected? logger.exception(exc) raise ActionFailedException( - f"Running action handlers of '{action.name}' failed" + f"Running action handlers of '{action_def.name}' failed" f"(Run {run_id}). See ER logs for more details" ) @@ -241,7 +250,7 @@ async def run_action( else: action_result.update(coro_result) else: - for handler in action.handlers: + for handler in action_def.handlers: try: handler_result = await execute_action_handler( handler=handler, @@ -264,7 +273,7 @@ async def run_action( end_time = time.time_ns() duration = (end_time - start_time) / 1_000_000 logger.trace( - f"R{run_id} | Run action end '{request.action_name}', duration: {duration}ms" + f"R{run_id} | Run action end '{action_def.name}', duration: {duration}ms" ) # if partial results were sent, `action_result` may be None @@ -278,6 +287,63 @@ async def run_action( f"Unexpected result type: {type(action_result).__name__}" ) + return action_result + + +async def run_action_raw( + request: schemas.RunActionRequest, options: schemas.RunActionOptions +) -> schemas.RunActionResponse: + global last_run_id + run_id = last_run_id + last_run_id += 1 + logger.trace( + f"Run action '{request.action_name}', run id: {run_id}, partial result token: {options.partial_result_token}" + ) + # # TODO: check whether config is set: this will be solved by passing initial + # # configuration as payload of initialize + if global_state.runner_context is None: + raise ActionFailedException( + "Run of action failed because extension runner is not initialized yet" + ) + + project_def = global_state.runner_context.project + + try: + action = 
project_def.actions[request.action_name] + except KeyError: + logger.error(f"R{run_id} | Action {request.action_name} not found") + raise ActionFailedException( + f"R{run_id} | Action {request.action_name} not found" + ) + + action_name = request.action_name + + try: + action_cache = global_state.runner_context.action_cache_by_name[action_name] + except KeyError: + action_cache = domain.ActionCache() + global_state.runner_context.action_cache_by_name[action_name] = action_cache + + if action_cache.exec_info is not None: + action_exec_info = action_cache.exec_info + else: + action_exec_info = create_action_exec_info(action) + action_cache.exec_info = action_exec_info + + # TODO: catch validation errors + payload: code_action.RunActionPayload | None = None + if action_exec_info.payload_type is not None: + payload_type_with_validation = pydantic_dataclass(action_exec_info.payload_type) + payload = payload_type_with_validation(**request.params) + + action_result = await run_action( + action_def=action, + payload=payload, + meta=options.meta, + partial_result_token=options.partial_result_token, + run_id=run_id, + ) + response = action_result_to_run_action_response( action_result, options.result_format ) @@ -342,7 +408,7 @@ async def resolve_func_args_with_di( func: typing.Callable, known_args: dict[str, typing.Callable[[typing.Any], typing.Any]] | None = None, params_to_ignore: list[str] | None = None, -): +) -> dict[str, typing.Any]: func_parameters = inspect.signature(func).parameters func_annotations = inspect.get_annotations(func, eval_str=True) args: dict[str, typing.Any] = {} @@ -371,7 +437,7 @@ async def resolve_func_args_with_di( async def execute_action_handler( handler: domain.ActionHandler, payload: code_action.RunActionPayload | None, - run_context: code_action.RunActionContext | None, + run_context: code_action.RunActionContext | AsyncPlaceholderContext, run_id: int, action_exec_info: domain.ActionExecInfo, action_cache: domain.ActionCache, @@ -404,6 +470,7 
@@ async def execute_action_handler( handler_instance = handler_cache.instance handler_run_func = handler_instance.run exec_info = handler_cache.exec_info + # TODO: check status of exec_info? logger.trace( f"R{run_id} | Instance of action handler {handler.name} found in cache" ) @@ -421,7 +488,7 @@ async def execute_action_handler( logger.error(error) raise ActionFailedException( f"Import of action handler '{handler.name}' failed(Run {run_id}): {handler.source}" - ) + ) from error def get_handler_config(param_type): # TODO: validation errors @@ -450,6 +517,23 @@ def get_process_executor(param_type): handler_instance = action_handler(**args) handler_cache.instance = handler_instance handler_run_func = handler_instance.run + + service_instances = [ + instance + for instance in args.values() + if isinstance(instance, service.Service) + ] + handler_cache.used_services = service_instances + for service_instance in service_instances: + if service_instance not in runner_context.running_services: + runner_context.running_services[service_instance] = ( + domain.RunningServiceInfo(used_by=[]) + ) + + runner_context.running_services[service_instance].used_by.append( + handler_instance + ) + else: handler_run_func = action_handler @@ -470,48 +554,70 @@ def get_process_executor(param_type): ) raise ActionFailedException( f"Initialisation of action handler '{handler.name}' failed(Run {run_id}): {e}" - ) + ) from e exec_info.status = domain.ActionHandlerExecInfoStatus.INITIALIZED def get_run_payload(param_type): return payload - def get_run_context(param_type): - return run_context - - # DI in `run` function is allowed only for action handlers in form of functions. - # `run` in classes may not have additional parameters, constructor parameters should - # be used instead. TODO: Validate? 
- args = await resolve_func_args_with_di( - func=handler_run_func, - known_args={"payload": get_run_payload, "run_context": get_run_context}, - ) - # TODO: cache parameters + # to be able to catch source of exceptions in user-accessible code more precisely, + # manually enter and exit run context try: - # there is also `inspect.iscoroutinefunction` but it cannot recognize coroutine - # functions which are class methods. Use `isawaitable` on result instead. - call_result = handler_run_func(**args) - if inspect.isawaitable(call_result): - execution_result = await call_result - else: - execution_result = call_result + run_context_instance = await run_context.__aenter__() except Exception as exception: - if isinstance(exception, code_action.StopActionRunWithResult): - action_result = exception.result - response = action_result_to_run_action_response(action_result, "string") - raise StopWithResponse(response=response) - elif isinstance(exception, iactionrunner.BaseRunActionException) or isinstance( - exception, code_action.ActionFailedException - ): - error_str = exception.message - else: - logger.error("Unhandled exception in action handler:") - logger.exception(exception) - error_str = str(exception) raise ActionFailedException( - f"Running action handler '{handler.name}' failed(Run {run_id}): {error_str}" + f"Failed to enter run context of handler {handler.name}(Run {run_id}): {str(exception)}." + + " See ER logs for more details" + ) from exception + + try: + + def get_run_context(param_type): + return run_context_instance + + # DI in `run` function is allowed only for action handlers in form of functions. + # `run` in classes may not have additional parameters, constructor parameters should + # be used instead. TODO: Validate? 
+ args = await resolve_func_args_with_di( + func=handler_run_func, + known_args={"payload": get_run_payload, "run_context": get_run_context}, ) + # TODO: cache parameters + try: + logger.trace(f"Call handler {handler.name}(run {run_id})") + # there is also `inspect.iscoroutinefunction` but it cannot recognize coroutine + # functions which are class methods. Use `isawaitable` on result instead. + call_result = handler_run_func(**args) + if inspect.isawaitable(call_result): + execution_result = await call_result + else: + execution_result = call_result + except Exception as exception: + if isinstance(exception, code_action.StopActionRunWithResult): + action_result = exception.result + response = action_result_to_run_action_response(action_result, "string") + raise StopWithResponse(response=response) from exception + elif isinstance( + exception, iactionrunner.BaseRunActionException + ) or isinstance(exception, code_action.ActionFailedException): + error_str = exception.message + else: + logger.error("Unhandled exception in action handler:") + logger.exception(exception) + error_str = str(exception) + raise ActionFailedException( + f"Running action handler '{handler.name}' failed(Run {run_id}): {error_str}" + ) from exception + finally: + # exit run context + try: + await run_context_instance.__aexit__(None, None, None) + except Exception as exception: + raise ActionFailedException( + f"Failed to exit run context of handler {handler.name}(Run {run_id}): {str(exception)}." 
+ + " See ER logs for more details" + ) from exception end_time = time.time_ns() duration = (end_time - start_time) / 1_000_000 diff --git a/finecode_extension_runner/src/finecode_extension_runner/api.proto b/finecode_extension_runner/src/finecode_extension_runner/api.proto deleted file mode 100644 index 2a13af6..0000000 --- a/finecode_extension_runner/src/finecode_extension_runner/api.proto +++ /dev/null @@ -1,25 +0,0 @@ -syntax = "proto3"; - -package finecode_extension_runner; - -message UpdateConfigRequest { - string working_dir = 1; - map config = 2; -} - -message UpdateConfigResponse { -} - -message RunActionRequest { - string action_name = 1; - string apply_on = 2; -} - -message RunActionResponse { - string result_text = 1; -} - -service ExtensionRunnerService { - rpc UpdateConfig(UpdateConfigRequest) returns (UpdateConfigResponse); - rpc RunAction(RunActionRequest) returns (RunActionResponse); -} diff --git a/finecode_extension_runner/src/finecode_extension_runner/cli.py b/finecode_extension_runner/src/finecode_extension_runner/cli.py index 73aea9f..0cbedc5 100644 --- a/finecode_extension_runner/src/finecode_extension_runner/cli.py +++ b/finecode_extension_runner/src/finecode_extension_runner/cli.py @@ -7,7 +7,7 @@ from loguru import logger import finecode_extension_runner.start as runner_start -from finecode_extension_runner import global_state +from finecode_extension_runner import global_state, logs @click.group() @@ -19,7 +19,6 @@ def main(): @main.command() @click.option("--trace", "trace", is_flag=True, default=False) @click.option("--debug", "debug", is_flag=True, default=False) -@click.option("--debug-port", "debug_port", type=int, default=5680) @click.option( "--project-path", "project_path", @@ -28,16 +27,24 @@ def main(): ) @click.option("--env-name", "env_name", type=str) def start( - trace: bool, debug: bool, debug_port: int, project_path: Path, env_name: str | None + trace: bool, + debug: bool, + project_path: Path, + env_name: str | None, ): + 
debug_port: int = 0 if debug is True: import debugpy # avoid debugger warnings printed to stdout, they affect I/O communication os.environ["PYDEVD_DISABLE_FILE_VALIDATION"] = "1" + + debug_port = runner_start._find_free_port() try: debugpy.listen(debug_port) + click.echo(f"Debug session: 127.0.0.1:{debug_port}") debugpy.wait_for_client() + debugpy.breakpoint() except Exception as e: logger.info(e) @@ -49,7 +56,18 @@ def start( global_state.project_dir_path = project_path global_state.env_name = env_name - runner_start.start_runner_sync(env_name) + log_file_path = (project_path + / ".venvs" + / env_name + / "logs" + / "runner.log") + + logs.setup_logging(log_level="INFO" if trace is False else "TRACE", log_file_path=log_file_path) + + if debug is True: + logger.info(f"Started debugger on 127.0.0.1:{debug_port}") + + runner_start.start_runner_sync() @main.command() diff --git a/finecode_extension_runner/src/finecode_extension_runner/context.py b/finecode_extension_runner/src/finecode_extension_runner/context.py index 88838a8..8e237f6 100644 --- a/finecode_extension_runner/src/finecode_extension_runner/context.py +++ b/finecode_extension_runner/src/finecode_extension_runner/context.py @@ -2,11 +2,14 @@ from dataclasses import dataclass, field from finecode_extension_runner import domain +from finecode_extension_api import service @dataclass class RunnerContext: project: domain.Project action_cache_by_name: dict[str, domain.ActionCache] = field(default_factory=dict) - # don't overwrite, only append and remove - docs_owned_by_client: list[str] = field(default_factory=list) + project_config_version: int = 0 + running_services: dict[service.Service, domain.RunningServiceInfo] = field( + default_factory=dict + ) diff --git a/finecode_extension_runner/src/finecode_extension_runner/di/bootstrap.py b/finecode_extension_runner/src/finecode_extension_runner/di/bootstrap.py index fd011db..6702b41 100644 --- 
a/finecode_extension_runner/src/finecode_extension_runner/di/bootstrap.py +++ b/finecode_extension_runner/src/finecode_extension_runner/di/bootstrap.py @@ -1,6 +1,7 @@ +import collections.abc import functools import pathlib -from typing import Any, Awaitable, Callable +from typing import Any, Callable try: import fine_python_ast @@ -21,6 +22,8 @@ iactionrunner, icache, icommandrunner, + idevenvinfoprovider, + ifileeditor, ifilemanager, ilogger, iprojectinfoprovider, @@ -28,12 +31,14 @@ iprojectfileclassifier, ipypackagelayoutinfoprovider, ) -from finecode_extension_runner import global_state, schemas +from finecode_extension_runner import domain from finecode_extension_runner._services import run_action from finecode_extension_runner.di import _state, resolver from finecode_extension_runner.impls import ( action_runner, command_runner, + dev_env_info_provider, + file_editor, file_manager, inmemory_cache, loguru_logger, @@ -44,32 +49,38 @@ def bootstrap( - get_document_func: Callable, - save_document_func: Callable, project_def_path_getter: Callable[[], pathlib.Path], - project_raw_config_getter: Callable[[str], Awaitable[dict[str, Any]]], + project_raw_config_getter: Callable[[str], collections.abc.Awaitable[dict[str, Any]]], + current_project_raw_config_version_getter: Callable[[], int], cache_dir_path_getter: Callable[[], pathlib.Path], + actions_names_getter: Callable[[], list[str]], + action_by_name_getter: Callable[[str], domain.Action], + current_env_name_getter: Callable[[], str] ): # logger_instance = loguru_logger.LoguruLogger() logger_instance = loguru_logger.get_logger() + command_runner_instance = command_runner.CommandRunner(logger=logger_instance) + dev_env_info_provider_instance = dev_env_info_provider.DevEnvInfoProvider(logger=logger_instance) file_manager_instance = file_manager.FileManager( - docs_owned_by_client=global_state.runner_context.docs_owned_by_client, - get_document_func=get_document_func, - save_document_func=save_document_func, 
logger=logger_instance, ) + file_editor_instance = file_editor.FileEditor(logger=logger_instance, file_manager=file_manager_instance) cache_instance = inmemory_cache.InMemoryCache( - file_manager=file_manager_instance, logger=logger_instance + file_editor=file_editor_instance, logger=logger_instance ) action_runner_instance = action_runner.ActionRunner( - internal_service_func=run_action_wrapper + run_action_func=run_action.run_action, + actions_names_getter=actions_names_getter, + action_by_name_getter=action_by_name_getter ) _state.container[ilogger.ILogger] = logger_instance _state.container[icommandrunner.ICommandRunner] = command_runner_instance _state.container[ifilemanager.IFileManager] = file_manager_instance + _state.container[ifileeditor.IFileEditor] = file_editor_instance _state.container[icache.ICache] = cache_instance _state.container[iactionrunner.IActionRunner] = action_runner_instance + _state.container[idevenvinfoprovider.IDevEnvInfoProvider] = dev_env_info_provider_instance if fine_python_ast is not None: _state.factories[fine_python_ast.IPythonSingleAstProvider] = ( @@ -83,11 +94,13 @@ def bootstrap( project_info_provider_factory, project_def_path_getter=project_def_path_getter, project_raw_config_getter=project_raw_config_getter, + current_project_raw_config_version_getter=current_project_raw_config_version_getter ) _state.factories[iextensionrunnerinfoprovider.IExtensionRunnerInfoProvider] = ( functools.partial( extension_runner_info_provider_factory, cache_dir_path_getter=cache_dir_path_getter, + current_env_name_getter=current_env_name_getter ) ) _state.factories[iprojectfileclassifier.IProjectFileClassifier] = ( @@ -102,23 +115,9 @@ def bootstrap( # TODO: parameters from config -async def run_action_wrapper( - action_name: str, payload: dict[str, Any] -) -> dict[str, Any]: - request = schemas.RunActionRequest(action_name=action_name, params=payload) - options = schemas.RunActionOptions(result_format="json") - - try: - response = await 
run_action.run_action(request=request, options=options) - except run_action.ActionFailedException as exception: - raise iactionrunner.ActionRunFailed(exception.message) - - return response.result - - def python_single_ast_provider_factory(container): return fine_python_ast.PythonSingleAstProvider( - file_manager=container[ifilemanager.IFileManager], + file_editor=container[ifileeditor.IFileEditor], cache=container[icache.ICache], logger=container[ilogger.ILogger], ) @@ -126,7 +125,7 @@ def python_single_ast_provider_factory(container): def mypy_single_ast_provider_factory(container): return fine_python_mypy.MypySingleAstProvider( - file_manager=container[ifilemanager.IFileManager], + file_editor=container[ifileeditor.IFileEditor], cache=container[icache.ICache], logger=container[ilogger.ILogger], ) @@ -135,21 +134,24 @@ def mypy_single_ast_provider_factory(container): def project_info_provider_factory( container, project_def_path_getter: Callable[[], pathlib.Path], - project_raw_config_getter: Callable[[str], Awaitable[dict[str, Any]]], + project_raw_config_getter: Callable[[str], collections.abc.Awaitable[dict[str, Any]]], + current_project_raw_config_version_getter: Callable[[], int] ): return project_info_provider.ProjectInfoProvider( project_def_path_getter=project_def_path_getter, project_raw_config_getter=project_raw_config_getter, + current_project_raw_config_version_getter=current_project_raw_config_version_getter ) async def extension_runner_info_provider_factory( container, cache_dir_path_getter: Callable[[], pathlib.Path], + current_env_name_getter: Callable[[], str] ): logger = await resolver.get_service_instance(ilogger.ILogger) return extension_runner_info_provider.ExtensionRunnerInfoProvider( - cache_dir_path_getter=cache_dir_path_getter, logger=logger + cache_dir_path_getter=cache_dir_path_getter, logger=logger, current_env_name_getter=current_env_name_getter ) @@ -169,8 +171,8 @@ async def project_file_classifier_factory( async def 
py_package_layout_info_provider_factory(container): - file_manager = await resolver.get_service_instance(ifilemanager.IFileManager) + file_editor = await resolver.get_service_instance(ifileeditor.IFileEditor) cache = await resolver.get_service_instance(icache.ICache) return fine_python_package_info.PyPackageLayoutInfoProvider( - file_manager=file_manager, cache=cache + file_editor=file_editor, cache=cache ) diff --git a/finecode_extension_runner/src/finecode_extension_runner/domain.py b/finecode_extension_runner/src/finecode_extension_runner/domain.py index c96bb28..d196eb4 100644 --- a/finecode_extension_runner/src/finecode_extension_runner/domain.py +++ b/finecode_extension_runner/src/finecode_extension_runner/domain.py @@ -5,7 +5,7 @@ import typing from pathlib import Path -from finecode_extension_api import code_action +from finecode_extension_api import code_action, service from finecode_extension_runner.impls import process_executor as process_executor_impl @@ -52,13 +52,11 @@ def __str__(self) -> str: class ActionExecInfo: def __init__( self, - payload_type: typing.Type[code_action.RunActionPayload] | None, - run_context_type: typing.Type[code_action.RunActionContext] | None, + payload_type: type[code_action.RunActionPayload] | None, + run_context_type: type[code_action.RunActionContext] | None, ) -> None: - self.payload_type: typing.Type[code_action.RunActionPayload] | None = ( - payload_type - ) - self.run_context_type: typing.Type[code_action.RunActionContext] | None = ( + self.payload_type: type[code_action.RunActionPayload] | None = payload_type + self.run_context_type: type[code_action.RunActionContext] | None = ( run_context_type ) # instantiation of process executor impl is cheap. 
To avoid analyzing all @@ -89,8 +87,11 @@ class ActionCache: @dataclasses.dataclass class ActionHandlerCache: + # set all values by default to None and cache will be filled step-by-step if step + # was successful instance: code_action.ActionHandler | None = None exec_info: ActionHandlerExecInfo | None = None + used_services: list[service.Service] | None = None class TextDocumentInfo: @@ -112,3 +113,8 @@ class TextDocumentNotOpened(Exception): ... class PartialResult(typing.NamedTuple): token: int | str value: typing.Any + + +@dataclasses.dataclass +class RunningServiceInfo: + used_by: list[code_action.ActionHandler] diff --git a/finecode_extension_runner/src/finecode_extension_runner/impls/action_runner.py b/finecode_extension_runner/src/finecode_extension_runner/impls/action_runner.py index 98bf5d8..6be6481 100644 --- a/finecode_extension_runner/src/finecode_extension_runner/impls/action_runner.py +++ b/finecode_extension_runner/src/finecode_extension_runner/impls/action_runner.py @@ -1,11 +1,32 @@ -from typing import Any - +import collections.abc +import typing +from finecode_extension_api import code_action from finecode_extension_api.interfaces import iactionrunner +from finecode_extension_runner import domain + class ActionRunner(iactionrunner.IActionRunner): - def __init__(self, internal_service_func): - self._internal_service_func = internal_service_func + def __init__(self, run_action_func: typing.Callable[[domain.Action, code_action.RunActionPayload, code_action.RunActionMeta], collections.abc.Coroutine[None, None, code_action.RunActionResult]], + actions_names_getter: typing.Callable[[], list[str]], + action_by_name_getter: typing.Callable[[str], domain.Action]): + self._run_action_func = run_action_func + self._actions_names_getter = actions_names_getter + self._action_by_name_getter = action_by_name_getter + + async def run_action( + self, action: type[code_action.Action[code_action.RunPayloadType, code_action.RunContextType, 
code_action.RunResultType]], payload: code_action.RunActionPayload, meta: code_action.RunActionMeta + ) -> code_action.RunActionResult: + try: + return await self._run_action_func(action, payload, meta) + except Exception as exception: + raise iactionrunner.ActionRunFailed(str(exception)) + + def get_actions_names(self) -> list[str]: + return self._actions_names_getter() - async def run_action(self, name: str, payload: dict[str, Any]) -> dict[str, Any]: - return await self._internal_service_func(action_name=name, payload=payload) + def get_action_by_name(self, name: str) -> type[code_action.Action[code_action.RunPayloadType, code_action.RunContextType, code_action.RunResultType]]: + try: + return self._action_by_name_getter(name) + except KeyError: + raise iactionrunner.ActionNotFound(f"Action '{name}' not found") diff --git a/finecode_extension_runner/src/finecode_extension_runner/impls/dev_env_info_provider.py b/finecode_extension_runner/src/finecode_extension_runner/impls/dev_env_info_provider.py new file mode 100644 index 0000000..754e4ba --- /dev/null +++ b/finecode_extension_runner/src/finecode_extension_runner/impls/dev_env_info_provider.py @@ -0,0 +1,39 @@ +# import pathlib +# import typing + +# from finecode_extension_api import common_types +from finecode_extension_api.interfaces import idevenvinfoprovider, ilogger + + +class DevEnvInfoProvider( + idevenvinfoprovider.IDevEnvInfoProvider +): + def __init__( + self, + logger: ilogger.ILogger, + # docs_owned_by_ide: list[str], + # get_document_func: typing.Callable, + # save_document_func: typing.Callable, + ) -> None: + self.logger = logger + # self.docs_owned_by_ide = docs_owned_by_ide + # self.get_document_func = get_document_func + # self.save_document_func = save_document_func + + # async def owned_files(self, dev_env: common_types.DevEnv) -> list[pathlib.Path]: + # ... + + # async def is_owner_of(self, dev_env: common_types.DevEnv, file_path: pathlib.Path) -> bool: + # ... 
+ + # async def file_is_owned_by(self, file_path: pathlib.Path) -> list[common_types.DevEnv]: + # ... + + # async def files_owned_by_dev_envs(self) -> list[pathlib.Path]: + # ... + + # async def get_file_content(self, file_path: pathlib.Path) -> bytes: + # ... + + # async def save_file_content(self, file_path: pathlib.Path, file_content: bytes) -> None: + # ... diff --git a/finecode_extension_runner/src/finecode_extension_runner/impls/extension_runner_info_provider.py b/finecode_extension_runner/src/finecode_extension_runner/impls/extension_runner_info_provider.py index 236a5d6..508f152 100644 --- a/finecode_extension_runner/src/finecode_extension_runner/impls/extension_runner_info_provider.py +++ b/finecode_extension_runner/src/finecode_extension_runner/impls/extension_runner_info_provider.py @@ -8,13 +8,17 @@ class ExtensionRunnerInfoProvider( iextensionrunnerinfoprovider.IExtensionRunnerInfoProvider ): def __init__( - self, cache_dir_path_getter: Callable[[], pathlib.Path], logger: ilogger.ILogger + self, cache_dir_path_getter: Callable[[], pathlib.Path], logger: ilogger.ILogger, current_env_name_getter: Callable[[], str] ) -> None: self.cache_dir_path_getter = cache_dir_path_getter self.logger = logger + self.current_env_name_getter = current_env_name_getter self._site_packages_cache: dict[pathlib.Path, list[pathlib.Path]] = {} + def get_current_env_name(self) -> str: + return self.current_env_name_getter() + def get_cache_dir_path(self) -> pathlib.Path: return self.cache_dir_path_getter() @@ -25,6 +29,10 @@ def get_venv_dir_path_of_env(self, env_name: str) -> pathlib.Path: venvs_dir_path = current_venv_dir_path.parent return venvs_dir_path / env_name + def get_current_venv_dir_path(self) -> pathlib.Path: + current_env_name = self.get_current_env_name() + return self.get_venv_dir_path_of_env(env_name=current_env_name) + def get_venv_site_packages(self, venv_dir_path: pathlib.Path) -> list[pathlib.Path]: # venv site packages can be cached because they don't 
change and if user runs # prepare-envs or updates environment in any other way, current ER should be diff --git a/finecode_extension_runner/src/finecode_extension_runner/impls/file_editor.py b/finecode_extension_runner/src/finecode_extension_runner/impls/file_editor.py new file mode 100644 index 0000000..e1186da --- /dev/null +++ b/finecode_extension_runner/src/finecode_extension_runner/impls/file_editor.py @@ -0,0 +1,521 @@ +import asyncio +import contextlib +import collections.abc +import dataclasses +import pathlib +from typing import TypeVar + +from finecode_extension_api.interfaces import ifileeditor, ifilemanager, ilogger + + +T = TypeVar("T") + + +class MultiQueueIterator(collections.abc.AsyncIterator[T]): + """Merges multiple asyncio queues into a single async iterator. + + Supports dynamic addition and removal of queues during iteration. + """ + + def __init__(self, queues: list[asyncio.Queue[T]]) -> None: + self._queues: list[asyncio.Queue[T]] = queues + self._queues_changed_event: asyncio.Event = asyncio.Event() + self._shutdown_event: asyncio.Event = asyncio.Event() + + def shutdown(self) -> None: + """Shutdown the iterator, causing it to raise StopAsyncIteration.""" + self._shutdown_event.set() + + def add_queue(self, queue: asyncio.Queue[T]) -> None: + """Add a queue to be merged.""" + self._queues.append(queue) + self._queues_changed_event.set() + + def remove_queue(self, queue: asyncio.Queue[T]) -> None: + """Remove a queue from being merged.""" + if queue in self._queues: + self._queues.remove(queue) + self._queues_changed_event.set() + + def __aiter__(self) -> "MultiQueueIterator[T]": + return self + + async def __anext__(self) -> T: + while True: + if not self._queues: + raise StopAsyncIteration + + # Clear the event before starting wait + self._queues_changed_event.clear() + + # Create get tasks for all queues + tasks = {asyncio.create_task(queue.get()): queue for queue in self._queues} + + # Also wait for the queues changed event and shutdown 
event + queues_changed_task = asyncio.create_task(self._queues_changed_event.wait()) + shutdown_task = asyncio.create_task(self._shutdown_event.wait()) + # Wait for either a queue to have an item, queues to change, or shutdown + all_tasks = set(tasks.keys()) | {queues_changed_task, shutdown_task} + + try: + done, pending = await asyncio.wait( + all_tasks, return_when=asyncio.FIRST_COMPLETED + ) + + # Cancel all pending tasks + for task in pending: + task.cancel() + + # If shutdown, stop iteration + if shutdown_task in done: + raise StopAsyncIteration + + # If queues changed, restart the loop + if queues_changed_task in done: + continue + + # Get the result from the completed task + completed_task = done.pop() + result = await completed_task + + return result + except asyncio.CancelledError: + # Cancel all tasks on cancellation + for task in all_tasks: + if not task.done(): + task.cancel() + raise + finally: + # Make sure control tasks are cancelled if they're still pending + if not queues_changed_task.done(): + queues_changed_task.cancel() + if not shutdown_task.done(): + shutdown_task.cancel() + + async def aclose(self) -> None: + """Close the iterator and cleanup resources.""" + self.shutdown() + + +@dataclasses.dataclass +class OpenedFileInfo: + content: str + version: str + opened_by: list[ifileeditor.IFileEditorSession] + + +@dataclasses.dataclass +class BlockedFileInfo: + blocked_by: "FileEditorSession" + unblock_event: asyncio.Event + + +class FileEditorSession(ifileeditor.IFileEditorSession): + def __init__( + self, + logger: ilogger.ILogger, + author: ifileeditor.FileOperationAuthor, + file_manager: ifilemanager.IFileManager, + opened_files: dict[pathlib.Path, OpenedFileInfo], + blocked_files: dict[pathlib.Path, BlockedFileInfo], + subscriptions_by_session: dict[ + pathlib.Path, + dict[ + ifileeditor.IFileEditorSession, + asyncio.Queue[ifileeditor.FileChangeEvent], + ], + ], + ) -> None: + self.logger = logger + self.author = author + self._file_manager = 
file_manager + self._opened_files = opened_files + self._blocked_files = blocked_files + self._subscriptions_by_session = subscriptions_by_session + + # self._subscribed_to_opened_files = False + self._opened_file_subscription: ( + MultiQueueIterator[ifileeditor.FileChangeEvent] | None + ) = None + + @property + def _subscribed_to_opened_files(self) -> bool: + return self._opened_file_subscription is not None + + def close(self) -> None: + """Close the session and cleanup all resources.""" + # Shutdown active subscription first + if self._opened_file_subscription is not None: + self._opened_file_subscription.shutdown() + + # Clean up subscriptions + files_to_unsubscribe: list[pathlib.Path] = [] + for file_path, sessions_dict in self._subscriptions_by_session.items(): + if self in sessions_dict: + files_to_unsubscribe.append(file_path) + + for file_path in files_to_unsubscribe: + self._unsubscribe_from_file_changes(file_path=file_path) + + self._opened_file_subscription = None + + # Close all files opened by this session + files_to_close: list[pathlib.Path] = [] + for file_path, opened_file_info in self._opened_files.items(): + if self in opened_file_info.opened_by: + files_to_close.append(file_path) + + for file_path in files_to_close: + try: + opened_file_info = self._opened_files[file_path] + opened_file_info.opened_by.remove(self) + + # Remove file from opened_files if no sessions have it open + if len(opened_file_info.opened_by) == 0: + del self._opened_files[file_path] + except (KeyError, ValueError): + # File was already removed or session not in list + pass + + # Unblock files blocked by this session + files_to_unblock: list[pathlib.Path] = [] + for file_path, blocked_file_info in self._blocked_files.items(): + if blocked_file_info.blocked_by == self: + files_to_unblock.append(file_path) + + for file_path in files_to_unblock: + try: + blocked_file_info = self._blocked_files.pop(file_path) + blocked_file_info.unblock_event.set() + except KeyError: + # File 
was already unblocked + pass + + async def change_file( + self, file_path: pathlib.Path, change: ifileeditor.FileChange + ) -> None: + self.logger.trace(f"Change file {file_path}") + if file_path in self._opened_files: + opened_file_info = self._opened_files[file_path] + file_content = opened_file_info.content + new_file_content = FileEditorSession.apply_change_to_file_content( + change=change, file_content=file_content + ) + self._update_opened_file_info( + file_path=file_path, new_file_content=new_file_content + ) + else: + file_content = await self._file_manager.get_content(file_path=file_path) + new_file_content = FileEditorSession.apply_change_to_file_content( + change=change, file_content=file_content + ) + await self._file_manager.save_file( + file_path=file_path, file_content=new_file_content + ) + + # notify subscribers + if file_path in self._subscriptions_by_session: + self._notify_subscribers_about_file_change( + file_path=file_path, change=change + ) + + @staticmethod + def apply_change_to_file_content( + change: ifileeditor.FileChange, file_content: str + ) -> str: + if isinstance(change, ifileeditor.FileChangeFull): + return change.text + else: + # Split file content into lines + lines = file_content.splitlines(keepends=True) + + # Get start and end positions + start_line = change.range.start.line + start_char = change.range.start.character + end_line = change.range.end.line + end_char = change.range.end.character + + # Validate range + if start_line < 0 or end_line < 0: + raise ValueError(f"Invalid range: negative line numbers not allowed") + + if end_line < start_line or ( + end_line == start_line and end_char < start_char + ): + raise ValueError( + f"Invalid range: end position is before start position" + ) + + # For bounds checking: line indices beyond file length should be treated as + # appending to the end. LSP spec allows this for insertions at end of file, + # make it also here the same. 
+ # However, if both start and end are beyond bounds, it's likely an error. + if start_line > len(lines): + raise ValueError( + f"Invalid range: start line {start_line} is beyond file length {len(lines)}" + ) + + # Build the new content + # Part before the change + before_parts: list[str] = [] + for i in range(start_line): + before_parts.append(lines[i]) + if start_line < len(lines): + before_parts.append(lines[start_line][:start_char]) + before = "".join(before_parts) + + # Part after the change + after_parts: list[str] = [] + if end_line < len(lines): + after_parts.append(lines[end_line][end_char:]) + for i in range(end_line + 1, len(lines)): + after_parts.append(lines[i]) + after = "".join(after_parts) + + return before + change.text + after + + @contextlib.asynccontextmanager + async def subscribe_to_changes_of_opened_files( + self, + ) -> collections.abc.AsyncIterator[ifileeditor.FileChangeEvent]: + if self._subscribed_to_opened_files is True: + raise ValueError("This session is already subscribed to opened files") + + change_queues: list[asyncio.Queue[ifileeditor.FileChangeEvent]] = [] + for file_path, opened_file_info in self._opened_files.items(): + if self in opened_file_info.opened_by: + change_queue = self._subscribe_to_file_changes(file_path=file_path) + change_queues.append(change_queue) + + self._opened_file_subscription = MultiQueueIterator(queues=change_queues) + + try: + yield self._opened_file_subscription + finally: + # Unsubscribe from all files + files_to_unsubscribe: list[pathlib.Path] = [] + for file_path, sessions_dict in self._subscriptions_by_session.items(): + if self in sessions_dict: + files_to_unsubscribe.append(file_path) + + for file_path in files_to_unsubscribe: + self._unsubscribe_from_file_changes(file_path=file_path) + + self._opened_file_subscription.shutdown() + self._opened_file_subscription = None + + def _subscribe_to_file_changes( + self, file_path: pathlib.Path + ) -> asyncio.Queue[ifileeditor.FileChangeEvent]: + if 
file_path not in self._subscriptions_by_session: + self._subscriptions_by_session[file_path] = {} + + change_queue: asyncio.Queue[ifileeditor.FileChangeEvent] = asyncio.Queue() + self._subscriptions_by_session[file_path][self] = change_queue + + return change_queue + + def _unsubscribe_from_file_changes( + self, file_path: pathlib.Path + ) -> asyncio.Queue[ifileeditor.FileChangeEvent]: + change_queue = self._subscriptions_by_session[file_path][self] + + del self._subscriptions_by_session[file_path][self] + + if len(self._subscriptions_by_session[file_path]) == 0: + del self._subscriptions_by_session[file_path] + + return change_queue + + def _notify_subscribers_about_file_change( + self, file_path: pathlib.Path, change: ifileeditor.FileChange + ) -> None: + # this method expects that there are subscriptions to this file + for change_queue in self._subscriptions_by_session[file_path].values(): + file_change_event = ifileeditor.FileChangeEvent( + file_path=file_path, author=self.author, change=change + ) + change_queue.put_nowait(file_change_event) + + async def open_file(self, file_path: pathlib.Path) -> None: + if file_path in self._opened_files: + # file is already opened by one of the sessions, just add current session to + # the `opened_by` list + opened_file_info = self._opened_files[file_path] + if self in opened_file_info.opened_by: + raise ifileeditor.FileAlreadyOpenError( + f"{file_path} is already opened in this session" + ) + + opened_file_info.opened_by.append(self) + else: + initial_file_content = await self._file_manager.get_content( + file_path=file_path + ) + initial_file_version = await self._file_manager.get_file_version( + file_path=file_path + ) + new_opened_file_info = OpenedFileInfo( + content=initial_file_content, + version=initial_file_version, + opened_by=[self], + ) + self._opened_files[file_path] = new_opened_file_info + + if self._subscribed_to_opened_files: + change_queue = self._subscribe_to_file_changes(file_path=file_path) + assert 
self._opened_file_subscription is not None + self._opened_file_subscription.add_queue(change_queue) + + async def save_opened_file(self, file_path: pathlib.Path) -> None: + if file_path not in self._opened_files: + raise ValueError(f"{file_path} is not opened") + opened_file_info = self._opened_files[file_path] + + if self not in opened_file_info.opened_by: + raise ValueError(f"{file_path} is not opened in this session") + + file_content = opened_file_info.content + await self._file_manager.save_file( + file_path=file_path, file_content=file_content + ) + + async def close_file(self, file_path: pathlib.Path) -> None: + if self._subscribed_to_opened_files: + change_queue = self._unsubscribe_from_file_changes(file_path=file_path) + assert self._opened_file_subscription is not None + self._opened_file_subscription.remove_queue(change_queue) + + try: + opened_file_info = self._opened_files[file_path] + try: + opened_file_info.opened_by.remove(self) + except ValueError: + raise ValueError(f"{file_path} is not opened in this session") + + if len(opened_file_info.opened_by) == 0: + del self._opened_files[file_path] + except KeyError: + raise ValueError(f"{file_path} is not opened") + + def _update_opened_file_info( + self, file_path: pathlib.Path, new_file_content: str + ) -> None: + # this method expects `file_path` is opened + opened_file_info = self._opened_files[file_path] + opened_file_info.content = new_file_content + new_version = hash(new_file_content) # or just increase? 
+ opened_file_info.version = str(new_version) + + @contextlib.asynccontextmanager + async def read_file( + self, file_path: pathlib.Path, block: bool = False + ) -> collections.abc.AsyncIterator[ifileeditor.FileInfo]: + if file_path in self._blocked_files: + blocked_file_info = self._blocked_files[file_path] + if blocked_file_info.blocked_by == self: + raise ValueError( + f"{file_path} is blocked by this session, cannot read it" + ) + + unblock_event = blocked_file_info.unblock_event + await unblock_event.wait() + + if block: + blocked_file_info = BlockedFileInfo( + blocked_by=self, unblock_event=asyncio.Event() + ) + self._blocked_files[file_path] = blocked_file_info + try: + if file_path in self._opened_files: + opened_file_info = self._opened_files[file_path] + file_content = opened_file_info.content + file_version = opened_file_info.version + else: + file_content = await self._file_manager.get_content(file_path=file_path) + file_version = await self._file_manager.get_file_version( + file_path=file_path + ) + file_info = ifileeditor.FileInfo(content=file_content, version=file_version) + yield file_info + finally: + if block: + blocked_file_info = self._blocked_files.pop(file_path) + blocked_file_info.unblock_event.set() + + async def read_file_version(self, file_path: pathlib.Path) -> str: + if file_path in self._blocked_files: + blocked_file_info = self._blocked_files[file_path] + unblock_event = blocked_file_info.unblock_event + await unblock_event.wait() + + if file_path in self._opened_files: + opened_file_info = self._opened_files[file_path] + file_version = opened_file_info.version + else: + file_version = await self._file_manager.get_file_version( + file_path=file_path + ) + return file_version + + async def save_file(self, file_path: pathlib.Path, file_content: str) -> None: + await self._file_manager.save_file( + file_path=file_path, file_content=file_content + ) + + if file_path in self._opened_files: + self._update_opened_file_info( + 
file_path=file_path, new_file_content=file_content + ) + + if file_path in self._subscriptions_by_session: + file_change = ifileeditor.FileChangeFull(text=file_content) + self._notify_subscribers_about_file_change( + file_path=file_path, change=file_change + ) + + +class FileEditor(ifileeditor.IFileEditor): + def __init__( + self, logger: ilogger.ILogger, file_manager: ifilemanager.IFileManager + ) -> None: + self.logger = logger + self.file_manager = file_manager + + self._opened_files: dict[pathlib.Path, OpenedFileInfo] = {} + self._blocked_files: dict[pathlib.Path, BlockedFileInfo] = {} + self._sessions: list[FileEditorSession] = [] + self._author_by_session: dict[ + ifileeditor.IFileEditorSession, ifileeditor.FileOperationAuthor + ] = {} + self._subscriptions_by_session: dict[ + pathlib.Path, + dict[ + ifileeditor.IFileEditorSession, + asyncio.Queue[ifileeditor.FileChangeEvent], + ], + ] = {} + + @contextlib.asynccontextmanager + async def session( + self, author: ifileeditor.FileOperationAuthor + ) -> collections.abc.AsyncIterator[ifileeditor.IFileEditorSession]: + new_session = FileEditorSession( + logger=self.logger, + author=author, + file_manager=self.file_manager, + opened_files=self._opened_files, + blocked_files=self._blocked_files, + subscriptions_by_session=self._subscriptions_by_session, + ) + self._sessions.append(new_session) + self._author_by_session[new_session] = author + try: + yield new_session + finally: + new_session.close() + self._sessions.remove(new_session) + del self._author_by_session[new_session] + + def get_opened_files(self) -> list[pathlib.Path]: + return list(self._opened_files.keys()) diff --git a/finecode_extension_runner/src/finecode_extension_runner/impls/file_manager.py b/finecode_extension_runner/src/finecode_extension_runner/impls/file_manager.py index 70ee436..3033295 100644 --- a/finecode_extension_runner/src/finecode_extension_runner/impls/file_manager.py +++ 
b/finecode_extension_runner/src/finecode_extension_runner/impls/file_manager.py @@ -1,65 +1,25 @@ import hashlib import shutil from pathlib import Path -from typing import Callable from finecode_extension_api.interfaces import ifilemanager, ilogger -from finecode_extension_runner import domain class FileManager(ifilemanager.IFileManager): def __init__( self, - docs_owned_by_client: list[str], - get_document_func: Callable, - save_document_func: Callable, logger: ilogger.ILogger, ) -> None: - self.docs_owned_by_client = docs_owned_by_client - self.get_document_func = get_document_func - self.save_document_func = save_document_func self.logger = logger async def get_content(self, file_path: Path) -> str: - file_uri = f"file://{file_path.as_posix()}" - file_content: str = "" - - if file_uri in self.docs_owned_by_client: - # docs owned by client cannot be cached, always read from client - try: - document_info = await self.get_document_func(file_uri) - file_content = document_info.text - except domain.TextDocumentNotOpened: - file_content = self.read_content_file_from_fs(file_path=file_path) - else: - file_content = self.read_content_file_from_fs(file_path=file_path) + file_content = self.read_content_file_from_fs(file_path=file_path) return file_content async def get_file_version(self, file_path: Path) -> str: - file_uri = path_to_uri_str(file_path) file_version: str = "" - - if file_uri in self.docs_owned_by_client: - # read file from client - try: - document_info = await self.get_document_func(file_uri) - file_version = str(document_info.version) - except domain.TextDocumentNotOpened: - file_version = self.get_hash_of_file_from_fs(file_path=file_path) - else: - # TODO - # st = file_path.stat() - # file_version = f'{st.st_size},{st.st_mtime}' - # if st.st_size != old.st_size: - # return True - # if st.st_mtime != old.st_mtime: - # new_hash = Cache.hash_digest(res_src) - # if new_hash != old.hash: - # return True - # return False - - file_version = 
self.get_hash_of_file_from_fs(file_path=file_path) + file_version = self.get_hash_of_file_from_fs(file_path=file_path) # 12 chars is enough to distinguish. The whole value is 64 chars length and # is not really needed in logs @@ -68,12 +28,8 @@ async def get_file_version(self, file_path: Path) -> str: return file_version async def save_file(self, file_path: Path, file_content: str) -> None: - file_uri = path_to_uri_str(file_path) - if file_uri in self.docs_owned_by_client: - await self.save_document_func(file_uri, file_content) - else: - with open(file_path, "w") as f: - f.write(file_content) + with open(file_path, "w") as f: + f.write(file_content) async def create_dir( self, dir_path: Path, create_parents: bool = True, exist_ok: bool = True @@ -101,7 +57,3 @@ def get_hash_of_file_from_fs(self, file_path: Path) -> str: file_version = hashlib.file_digest(f, "sha256").hexdigest() return file_version - - -def path_to_uri_str(path: Path) -> str: - return f"file://{path.as_posix()}" diff --git a/finecode_extension_runner/src/finecode_extension_runner/impls/inmemory_cache.py b/finecode_extension_runner/src/finecode_extension_runner/impls/inmemory_cache.py index 0e8edc7..ea050d4 100644 --- a/finecode_extension_runner/src/finecode_extension_runner/impls/inmemory_cache.py +++ b/finecode_extension_runner/src/finecode_extension_runner/impls/inmemory_cache.py @@ -1,16 +1,20 @@ from pathlib import Path from typing import Any, TypeAlias -from finecode_extension_api.interfaces import icache, ifilemanager, ilogger +from finecode_extension_api.interfaces import icache, ifileeditor, ilogger CacheKeyType: TypeAlias = str class InMemoryCache(icache.ICache): + FILE_OPERATION_AUTHOR = ifileeditor.FileOperationAuthor( + id="InMemoryCache" + ) + def __init__( - self, file_manager: ifilemanager.IFileManager, logger: ilogger.ILogger + self, file_editor: ifileeditor.IFileEditor, logger: ilogger.ILogger ): - self.file_manager = file_manager + self.file_editor = file_editor self.logger = 
logger self.cache_by_file: dict[Path, dict[CacheKeyType, tuple[str, Any]]] = {} @@ -20,7 +24,10 @@ def __init__( async def save_file_cache( self, file_path: Path, file_version: str, key: CacheKeyType, value: Any ) -> None: - current_file_version = await self.file_manager.get_file_version(file_path) + async with self.file_editor.session( + author=self.FILE_OPERATION_AUTHOR + ) as session: + current_file_version = await session.read_file_version(file_path) if file_version != current_file_version: # `value` was created for older version of file, don't save it @@ -35,9 +42,9 @@ async def save_file_cache( async def get_file_cache(self, file_path: Path, key: CacheKeyType) -> Any: try: file_cache = self.cache_by_file[file_path] - except KeyError: + except KeyError as exception: self.logger.debug(f"No cache for file {file_path}, cache miss") - raise icache.CacheMissException() + raise icache.CacheMissException() from exception if key not in file_cache: self.logger.debug( @@ -45,7 +52,11 @@ async def get_file_cache(self, file_path: Path, key: CacheKeyType) -> Any: ) raise icache.CacheMissException() - current_file_version = await self.file_manager.get_file_version(file_path) + async with self.file_editor.session( + author=self.FILE_OPERATION_AUTHOR + ) as session: + current_file_version = await session.read_file_version(file_path) + cached_file_version = file_cache[key][0] if cached_file_version != current_file_version: self.logger.debug( diff --git a/finecode_extension_runner/src/finecode_extension_runner/impls/project_info_provider.py b/finecode_extension_runner/src/finecode_extension_runner/impls/project_info_provider.py index 5f2d06d..9d5630d 100644 --- a/finecode_extension_runner/src/finecode_extension_runner/impls/project_info_provider.py +++ b/finecode_extension_runner/src/finecode_extension_runner/impls/project_info_provider.py @@ -1,5 +1,6 @@ +import collections.abc import pathlib -from typing import Any, Awaitable, Callable +from typing import Any, Callable from 
finecode_extension_api.interfaces import iprojectinfoprovider @@ -8,10 +9,12 @@ class ProjectInfoProvider(iprojectinfoprovider.IProjectInfoProvider): def __init__( self, project_def_path_getter: Callable[[], pathlib.Path], - project_raw_config_getter: Callable[[str], Awaitable[dict[str, Any]]], + project_raw_config_getter: Callable[[str], collections.abc.Awaitable[dict[str, Any]]], + current_project_raw_config_version_getter: Callable[[], int] ) -> None: self.project_def_path_getter = project_def_path_getter self.project_raw_config_getter = project_raw_config_getter + self.current_project_raw_config_version_getter = current_project_raw_config_version_getter def get_current_project_dir_path(self) -> pathlib.Path: project_def_path = self.project_def_path_getter() @@ -38,3 +41,6 @@ async def get_project_raw_config( async def get_current_project_raw_config(self) -> dict[str, Any]: current_project_path = self.get_current_project_def_path() return await self.get_project_raw_config(project_def_path=current_project_path) + + def get_current_project_raw_config_version(self) -> int: + return self.current_project_raw_config_version_getter() diff --git a/finecode_extension_runner/src/finecode_extension_runner/logs.py b/finecode_extension_runner/src/finecode_extension_runner/logs.py index 942bcdb..c9d4d30 100644 --- a/finecode_extension_runner/src/finecode_extension_runner/logs.py +++ b/finecode_extension_runner/src/finecode_extension_runner/logs.py @@ -1,6 +1,8 @@ import enum import io import sys +import inspect +import logging from pathlib import Path from loguru import logger @@ -49,8 +51,46 @@ def save_logs_to_file( logger.add(sys.stdout, level=log_level) + # Find the file with the largest ID in the log directory + log_dir_path = file_path.parent + max_id = 0 + + log_files_with_ids: list[tuple[int, Path]] = [] + if log_dir_path.exists(): + for log_file in log_dir_path.iterdir(): + if log_file.is_file() and log_file.suffix == '.log': + # Extract numeric ID from the end of 
the filename (before extension) + # first split by dot because loguru adds datetime after dot: + # ..log , we need stem without datetime + stem = log_file.stem.split('.')[0] + parts = stem.split('_') + last_part = parts[-1] + if last_part.isdigit(): + file_id = int(last_part) + max_id = max(max_id, file_id) + log_files_with_ids.append((file_id, log_file)) + + # Remove the oldest files if there are more than 10 + if len(log_files_with_ids) >= 10: + # Sort by ID (oldest first) + log_files_with_ids.sort(key=lambda x: x[0]) + # Keep only the 9 most recent, so after adding the new one we'll have 10 + files_to_remove = log_files_with_ids[:-9] + for _, log_file in files_to_remove: + try: + log_file.unlink() + logger.trace(f"Removed old log file: {log_file}") + except Exception as e: + logger.warning(f"Failed to remove old log file {log_file}: {e}") + + # Get next ID for new log file + next_id = max_id + 1 + + # Update file_path with the new ID + file_path_with_id = file_path.with_stem(file_path.stem + '_' + str(next_id)) + logger.add( - str(file_path), + str(file_path_with_id), rotation=rotation, retention=retention, level=log_level, @@ -70,4 +110,53 @@ def reset_log_level_for_group(group: str): del log_level_by_group[group] -__all__ = ["save_logs_to_file", "set_log_level_for_group", "reset_log_level_for_group"] +def setup_logging(log_level: str, log_file_path: Path) -> None: + logger.remove() + + # disable logging raw messages + # TODO: make configurable + # disable logging all raw sent messages + logger.configure(activation=[("pygls.protocol.json_rpc", False)]) + + # ~~extension runner communicates with workspace manager with tcp, we can print logs + # to stdout as well~~. 
See README.md + save_logs_to_file( + file_path=log_file_path, + log_level=log_level, + stdout=True, + ) + + # pygls uses standard python logger, intercept it and pass logs to loguru + class InterceptHandler(logging.Handler): + def emit(self, record: logging.LogRecord) -> None: + # Get corresponding Loguru level if it exists. + level: str | int + try: + level = logger.level(record.levelname).name + except ValueError: + level = record.levelno + + # Find caller from where originated the logged message. + frame, depth = inspect.currentframe(), 0 + while frame and ( + depth == 0 or frame.f_code.co_filename == logging.__file__ + ): + frame = frame.f_back + depth += 1 + + logger.opt(depth=depth, exception=record.exc_info).log( + level, record.getMessage() + ) + + logging.basicConfig(handlers=[InterceptHandler()], level=0, force=True) + + # TODO: make configurable + set_log_level_for_group( + "finecode_extension_runner.impls.file_manager", LogLevel.WARNING + ) + set_log_level_for_group( + "finecode_extension_runner.impls.inmemory_cache", LogLevel.WARNING + ) + + +__all__ = ["save_logs_to_file", "set_log_level_for_group", "reset_log_level_for_group", "setup_logging"] diff --git a/finecode_extension_runner/src/finecode_extension_runner/lsp_server.py b/finecode_extension_runner/src/finecode_extension_runner/lsp_server.py index 96be19c..9ce796a 100644 --- a/finecode_extension_runner/src/finecode_extension_runner/lsp_server.py +++ b/finecode_extension_runner/src/finecode_extension_runner/lsp_server.py @@ -6,6 +6,7 @@ from __future__ import annotations import atexit +import collections.abc import dataclasses import functools import json @@ -13,18 +14,23 @@ import typing import pygls.exceptions as pygls_exceptions +from pygls.workspace import position_codec from loguru import logger from lsprotocol import types from pygls.lsp import server as lsp_server from pygls.io_ import StdoutWriter, run_async - from finecode_extension_api import code_action -from finecode_extension_runner 
import domain, schemas, services +from finecode_extension_api.interfaces import ifileeditor +from pydantic.dataclasses import dataclass as pydantic_dataclass + +from finecode_extension_runner import schemas, services from finecode_extension_runner._services import run_action as run_action_service +from finecode_extension_runner.di import resolver import sys import io import threading +import contextlib import asyncio @@ -59,6 +65,8 @@ async def readline(self) -> bytes: return line except TimeoutError: ... + except ValueError as exception: + logger.warning(str(exception)) return bytes() async def readexactly(self, n: int) -> bytes: @@ -88,11 +96,26 @@ def stop(self) -> None: class CustomLanguageServer(lsp_server.LanguageServer): + def __init__(self, *args, **kwargs) -> None: + super().__init__(*args, **kwargs) + + self._finecode_async_tasks: list[asyncio.Task] = [] + self._finecode_exit_stack = contextlib.AsyncExitStack() + self._finecode_file_editor_session: ifileeditor.IFileEditorSession + self._finecode_file_operation_author = ifileeditor.FileOperationAuthor(id=self.name) + def report_server_error(self, error: Exception, source: lsp_server.ServerErrors): + logger.info(f'->1 {self._stop_event.is_set()}') # return logging of error (`lsp_server.LanguageServer` overwrites it) super(lsp_server.LanguageServer, self).report_server_error(error, source) + logger.info(f'->2 {self._stop_event.is_set()}') + # log traceback of exception for easier analysis + logger.exception(error) # send to client - super().report_server_error(error, source) + if not isinstance(error, ValueError): + # TODO: check message 'write to closed file' + super().report_server_error(error, source) + logger.info(f'->3 {self._stop_event.is_set()}') async def start_io_async( self, stdin: io.BinaryIO | None = None, stdout: io.BinaryIO | None = None @@ -120,11 +143,89 @@ async def start_io_async( logger.info("exception handler in json rpc server") pass finally: + logger.info(f'->5 
{self._stop_event.is_set()}') reader.stop() self.shutdown() + # shutdown is synchronous, so close exit stack here + await self._finecode_exit_stack.aclose() + logger.debug("Finecode async exit stack closed") + + def start_tcp(self, host: str, port: int) -> None: + """Starts TCP server.""" + logger.info("Starting TCP server on %s:%s", host, port) + + self._stop_event = stop_event = threading.Event() + + async def lsp_connection( + reader: asyncio.StreamReader, writer: asyncio.StreamWriter + ): + logger.debug("Connected to client") + self.protocol.set_writer(writer) # type: ignore + await run_async( + stop_event=stop_event, + reader=reader, + protocol=self.protocol, + logger=logger, + error_handler=self.report_server_error, + ) + logger.debug("Main loop finished") + self.shutdown() + + async def tcp_server(h: str, p: int): + self._server = await asyncio.start_server(lsp_connection, h, p) + + addrs = ", ".join(str(sock.getsockname()) for sock in self._server.sockets) + logger.info(f"Serving on {addrs}") + + try: + async with self._server: + await self._server.serve_forever() + finally: + # shutdown is synchronous, so close exit stack here + # TODO: test + await self._finecode_exit_stack.aclose() + + try: + asyncio.run(tcp_server(host, port)) + except asyncio.CancelledError: + logger.debug("Server was cancelled") + + + +def file_editor_file_change_to_lsp_text_edit(file_change: ifileeditor.FileChange) -> types.TextEdit: + if isinstance(file_change, ifileeditor.FileChangeFull): + # temporary workaround until we extend "applyWorkspaceEdit" from LSP with + # proper full document replacement without knowing original range + range_start_line = 0 + range_start_char = 0 + range_end_line = 999999 + range_end_char = 999999 + else: + range_start_line = file_change.range.start.line + range_start_char = file_change.range.start.character + range_end_line = file_change.range.end.line + range_end_char = file_change.range.end.character + + return types.TextEdit( + range=types.Range( + 
start=types.Position(line=range_start_line, character=range_start_char), + end=types.Position(line=range_end_line, character=range_end_char) + ), + new_text=file_change.text + ) + + +def position_from_client_units( + self, lines: collections.abc.Sequence[str], position: types.Position +) -> types.Position: + return position + def create_lsp_server() -> lsp_server.LanguageServer: + # avoid recalculating of positions by pygls + position_codec.PositionCodec.position_from_client_units = position_from_client_units + server = CustomLanguageServer("FineCode_Extension_Runner_Server", "v1") register_initialized_feature = server.feature(types.INITIALIZED) @@ -141,6 +242,9 @@ def create_lsp_server() -> lsp_server.LanguageServer: register_document_did_close_feature = server.feature(types.TEXT_DOCUMENT_DID_CLOSE) register_document_did_close_feature(_document_did_close) + + register_document_did_change_feature = server.feature(types.TEXT_DOCUMENT_DID_CHANGE) + register_document_did_change_feature(_document_did_change) register_update_config_feature = server.command("finecodeRunner/updateConfig") register_update_config_feature(update_config) @@ -172,17 +276,24 @@ def send_partial_result( server.progress(types.ProgressParams(token=token, value=partial_result_json)) run_action_service.set_partial_result_sender(send_partial_result) - + return server -def _on_initialized(ls: lsp_server.LanguageServer, params: types.InitializedParams): +def _on_initialized(ls: CustomLanguageServer, params: types.InitializedParams): logger.info(f"initialized {params}") -def _on_shutdown(ls: lsp_server.LanguageServer, params): +def _on_shutdown(ls: CustomLanguageServer, params): logger.info("Shutdown extension runner") services.shutdown_all_action_handlers() + + logger.debug("Stop Finecode async tasks") + for task in ls._finecode_async_tasks: + if not task.done(): + task.cancel() + ls._finecode_async_tasks = [] + logger.info("Shutdown end") return None @@ -191,73 +302,48 @@ def _on_exit(ls: 
lsp_server.LanguageServer, params): logger.info("Exit extension runner") -def _document_did_open( - ls: lsp_server.LanguageServer, params: types.DidOpenTextDocumentParams +def uri_to_path(uri: str) -> pathlib.Path: + return pathlib.Path(uri.removeprefix('file://')) + + +async def _document_did_open( + ls: CustomLanguageServer, params: types.DidOpenTextDocumentParams ): logger.info(f"document did open: {params.text_document.uri}") - services.document_did_open(params.text_document.uri) + # services.document_did_open(params.text_document.uri) + file_path = uri_to_path(uri=params.text_document.uri) + + await ls._finecode_file_editor_session.open_file(file_path=file_path) -def _document_did_close( - ls: lsp_server.LanguageServer, params: types.DidCloseTextDocumentParams +async def _document_did_close( + ls: CustomLanguageServer, params: types.DidCloseTextDocumentParams ): logger.info(f"document did close: {params.text_document.uri}") - services.document_did_close(params.text_document.uri) + file_path = uri_to_path(uri=params.text_document.uri) + + await ls._finecode_file_editor_session.close_file(file_path=file_path) -async def document_requester(server: lsp_server.LanguageServer, uri: str): - try: - document = await asyncio.wait_for( - server.protocol.send_request_async("documents/get", params={"uri": uri}), 10 - ) - except TimeoutError as error: - raise error - except pygls_exceptions.JsonRpcInternalError as error: - if error.message == "Exception: Document is not opened": - raise domain.TextDocumentNotOpened() - else: - raise error - - return domain.TextDocumentInfo( - uri=document.uri, version=document.version, text=document.text - ) +def lsp_document_change_to_file_editor_change(lsp_change: types.TextDocumentContentChangeEvent) -> ifileeditor.FileChange: + if isinstance(lsp_change, types.TextDocumentContentChangePartial): + return ifileeditor.FileChangePartial(range=ifileeditor.Range(start=ifileeditor.Position(line=lsp_change.range.start.line, 
character=lsp_change.range.start.character), end=ifileeditor.Position(line=lsp_change.range.end.line, character=lsp_change.range.end.character)), text=lsp_change.text) + elif isinstance(lsp_change, types.TextDocumentContentChangeWholeDocument): + return ifileeditor.FileChangeFull(text=lsp_change.text) + else: + logger.error(f"Unexpected type of document change from LSP client: {type(lsp_change)}") -async def document_saver(server: lsp_server.LanguageServer, uri: str, content: str): - try: - document = await asyncio.wait_for( - server.protocol.send_request_async("documents/get", params={"uri": uri}), 10 - ) - except TimeoutError as error: - raise error +async def _document_did_change( + ls: CustomLanguageServer, params: types.DidChangeTextDocumentParams +): + logger.info(f"document did change: {params.text_document.uri} {params.text_document.version}") + file_path = uri_to_path(uri=params.text_document.uri) - document_lines = document.text.split("\n") - params = types.ApplyWorkspaceEditParams( - edit=types.WorkspaceEdit( - # dict seems to be incorrectly unstructured on client(pygls issue?) 
- # use document_changes instead of changes - document_changes=[ - types.TextDocumentEdit( - text_document=types.OptionalVersionedTextDocumentIdentifier( - uri=uri - ), - edits=[ - types.TextEdit( - range=types.Range( - start=types.Position(line=0, character=0), - end=types.Position( - line=len(document_lines), - character=len(document_lines[-1]), - ), - ), - new_text=content, - ) - ], - ) - ] - ) - ) - await server.workspace_apply_edit_async(params) + for change in params.content_changes: + logger.trace(str(change)) + file_editor_change = lsp_document_change_to_file_editor_change(lsp_change=change) + await ls._finecode_file_editor_session.change_file(file_path=file_path, change=file_editor_change) async def get_project_raw_config( @@ -279,7 +365,7 @@ async def get_project_raw_config( async def update_config( - ls: lsp_server.LanguageServer, + ls: CustomLanguageServer, working_dir: pathlib.Path, project_name: str, project_def_path: pathlib.Path, @@ -314,10 +400,37 @@ async def update_config( ) response = await services.update_config( request=request, - document_requester=functools.partial(document_requester, ls), - document_saver=functools.partial(document_saver, ls), project_raw_config_getter=functools.partial(get_project_raw_config, ls), ) + # update_config calls DI bootstrap, we can instantiate file_editor_session first + # here + file_editor = await resolver.get_service_instance(ifileeditor.IFileEditor) + ls._finecode_file_editor_session = await ls._finecode_exit_stack.enter_async_context(file_editor.session(author=ls._finecode_file_operation_author)) + + # asyncio event loop is currently available only in handlers, not in server factory, + # so start task here + async def send_changed_files_to_lsp_client() -> None: + async with ls._finecode_file_editor_session.subscribe_to_changes_of_opened_files() as file_change_events: + async for file_change_event in file_change_events: + if file_change_event.author != ls._finecode_file_operation_author: + # someone else 
changed the file, send these changes to LSP client + params = types.ApplyWorkspaceEditParams( + edit=types.WorkspaceEdit( + document_changes=[ + types.TextDocumentEdit( + text_document=types.OptionalVersionedTextDocumentIdentifier(uri=f'file://{file_change_event.file_path.as_posix()}'), + edits=[ + file_editor_file_change_to_lsp_text_edit(file_change=file_change_event.change) + ] + ), + ] + ) + ) + await ls.workspace_apply_edit_async(params) + + send_changed_files_task = asyncio.create_task(send_changed_files_to_lsp_client()) + ls._finecode_async_tasks.append(send_changed_files_task) + return response.to_dict() except Exception as e: logger.exception(f"Update config error: {e}") @@ -350,11 +463,16 @@ async def run_action( ): logger.trace(f"Run action: {action_name}") request = schemas.RunActionRequest(action_name=action_name, params=params) - options_schema = schemas.RunActionOptions(**options if options is not None else {}) + + # use pydantic dataclass to convert dict to dataclass instance recursively + # (default dataclass constructor doesn't handle nested items, it stores them just + # as dict) + options_type = pydantic_dataclass(schemas.RunActionOptions) + options_schema = options_type(**options if options is not None else {}) status: str = "success" try: - response = await services.run_action(request=request, options=options_schema) + response = await services.run_action_raw(request=request, options=options_schema) except Exception as exception: if isinstance(exception, services.StopWithResponse): status = "stopped" diff --git a/finecode_extension_runner/src/finecode_extension_runner/schemas.py b/finecode_extension_runner/src/finecode_extension_runner/schemas.py index 6687897..7e9c904 100644 --- a/finecode_extension_runner/src/finecode_extension_runner/schemas.py +++ b/finecode_extension_runner/src/finecode_extension_runner/schemas.py @@ -2,6 +2,8 @@ from pathlib import Path from typing import Any, Literal +from finecode_extension_api import code_action + 
@dataclass class BaseSchema: @@ -45,6 +47,7 @@ class RunActionRequest(BaseSchema): @dataclass class RunActionOptions(BaseSchema): + meta: code_action.RunActionMeta partial_result_token: int | str | None = None result_format: Literal["json"] | Literal["string"] = "json" diff --git a/finecode_extension_runner/src/finecode_extension_runner/services.py b/finecode_extension_runner/src/finecode_extension_runner/services.py index 46d34b5..f235b76 100644 --- a/finecode_extension_runner/src/finecode_extension_runner/services.py +++ b/finecode_extension_runner/src/finecode_extension_runner/services.py @@ -1,3 +1,6 @@ +import json +import collections.abc +import hashlib import importlib import sys import types @@ -5,22 +8,35 @@ from pathlib import Path from loguru import logger +from finecode_extension_api import service from finecode_extension_runner import context, domain, global_state, schemas from finecode_extension_runner._services.run_action import ( ActionFailedException, StopWithResponse, - run_action, + run_action_raw, ) from finecode_extension_runner.di import bootstrap as di_bootstrap +def _compute_request_hash(request: schemas.UpdateConfigRequest) -> int: + """Compute a hash of the request object for version tracking.""" + request_dict = request.to_dict() + # Convert Path objects to strings for JSON serialization + request_dict["working_dir"] = str(request_dict["working_dir"]) + request_dict["project_def_path"] = str(request_dict["project_def_path"]) + + # Sort keys for consistent hashing + request_json = json.dumps(request_dict, sort_keys=True) + hash_bytes = hashlib.sha256(request_json.encode()).digest() + # Convert first 8 bytes to integer for version number + return int.from_bytes(hash_bytes[:8], byteorder="big") + + async def update_config( request: schemas.UpdateConfigRequest, - document_requester: typing.Callable, - document_saver: typing.Callable, project_raw_config_getter: typing.Callable[ - [str], typing.Awaitable[dict[str, typing.Any]] + [str], 
collections.abc.Awaitable[dict[str, typing.Any]] ], ) -> schemas.UpdateConfigResponse: project_dir_path = Path(request.working_dir) @@ -53,6 +69,7 @@ async def update_config( action_handler_configs=request.action_handler_configs, ), ) + global_state.runner_context.project_config_version = _compute_request_hash(request) # currently update_config is called only once directly after runner start. So we can # bootstrap here. Should be changed after adding updating configuration on the fly. @@ -71,12 +88,28 @@ def cache_dir_path_getter() -> Path: return project_cache_dir + def current_project_raw_config_version_getter() -> int: + return global_state.runner_context.project_config_version + + def actions_names_getter() -> list[str]: + assert global_state.runner_context is not None + return list(global_state.runner_context.project.actions.keys()) + + def action_by_name_getter(action_name: str) -> domain.Action: + assert global_state.runner_context is not None + return global_state.runner_context.project.actions[action_name] + + def current_env_name_getter() -> str: + return global_state.env_name + di_bootstrap.bootstrap( - get_document_func=document_requester, - save_document_func=document_saver, project_def_path_getter=project_def_path_getter, project_raw_config_getter=project_raw_config_getter, cache_dir_path_getter=cache_dir_path_getter, + current_project_raw_config_version_getter=current_project_raw_config_version_getter, + actions_names_getter=actions_names_getter, + action_by_name_getter=action_by_name_getter, + current_env_name_getter=current_env_name_getter, ) return schemas.UpdateConfigResponse() @@ -109,7 +142,10 @@ def reload_action(action_name: str) -> None: if handler_cache.exec_info is not None: shutdown_action_handler( action_handler_name=handler_name, + handler_instance=handler_cache.instance, exec_info=handler_cache.exec_info, + used_services=handler_cache.used_services, + runner_context=global_state.runner_context, ) del 
global_state.runner_context.action_cache_by_name[action_name] @@ -155,18 +191,12 @@ def resolve_package_path(package_name: str) -> str: return package_path -def document_did_open(document_uri: str) -> None: - if global_state.runner_context is not None: - global_state.runner_context.docs_owned_by_client.append(document_uri) - - -def document_did_close(document_uri: str) -> None: - if global_state.runner_context is not None: - global_state.runner_context.docs_owned_by_client.remove(document_uri) - - def shutdown_action_handler( - action_handler_name: str, exec_info: domain.ActionHandlerExecInfo + action_handler_name: str, + handler_instance: domain.ActionHandler | None, + exec_info: domain.ActionHandlerExecInfo, + used_services: list[service.Service], + runner_context: context.RunnerContext, ) -> None: # action handler exec info expected to exist in runner_context if exec_info.status == domain.ActionHandlerExecInfoStatus.SHUTDOWN: @@ -183,6 +213,18 @@ def shutdown_action_handler( logger.error(f"Failed to shutdown action {action_handler_name}: {e}") exec_info.status = domain.ActionHandlerExecInfoStatus.SHUTDOWN + if handler_instance is not None: + for used_service in used_services: + running_service_info = runner_context.running_services[used_service] + running_service_info.used_by.remove(handler_instance) + if len(running_service_info.used_by) == 0: + if isinstance(used_service, service.DisposableService): + try: + used_service.dispose() + except Exception as exception: + logger.error(f"Failed to dispose service: {used_service}") + logger.exception(exception) + def shutdown_all_action_handlers() -> None: if global_state.runner_context is not None: @@ -195,7 +237,10 @@ def shutdown_all_action_handlers() -> None: if handler_cache.exec_info is not None: shutdown_action_handler( action_handler_name=handler_name, + handler_instance=handler_cache.instance, exec_info=handler_cache.exec_info, + used_services=handler_cache.used_services, + 
runner_context=global_state.runner_context, ) diff --git a/finecode_extension_runner/src/finecode_extension_runner/start.py b/finecode_extension_runner/src/finecode_extension_runner/start.py index e8b0f51..2e5e23f 100644 --- a/finecode_extension_runner/src/finecode_extension_runner/start.py +++ b/finecode_extension_runner/src/finecode_extension_runner/start.py @@ -1,5 +1,4 @@ -import inspect -import logging +import os import socket import sys @@ -7,61 +6,14 @@ import finecode_extension_runner.global_state as global_state import finecode_extension_runner.lsp_server as extension_runner_lsp -from finecode_extension_runner import logs -def start_runner_sync(env_name: str) -> None: - logger.remove() - # disable logging raw messages - # TODO: make configurable - # logger.configure(activation=[("pygls.protocol.json_rpc", False)]) - # ~~extension runner communicates with workspace manager with tcp, we can print logs - # to stdout as well~~. See README.md +def start_runner_sync() -> None: assert global_state.project_dir_path is not None - logs.save_logs_to_file( - file_path=global_state.project_dir_path - / ".venvs" - / env_name - / "logs" - / "runner.log", - log_level=global_state.log_level, - stdout=True, - ) - - # pygls uses standard python logger, intercept it and pass logs to loguru - class InterceptHandler(logging.Handler): - def emit(self, record: logging.LogRecord) -> None: - # Get corresponding Loguru level if it exists. - level: str | int - try: - level = logger.level(record.levelname).name - except ValueError: - level = record.levelno - - # Find caller from where originated the logged message. 
- frame, depth = inspect.currentframe(), 0 - while frame and ( - depth == 0 or frame.f_code.co_filename == logging.__file__ - ): - frame = frame.f_back - depth += 1 - - logger.opt(depth=depth, exception=record.exc_info).log( - level, record.getMessage() - ) - - logging.basicConfig(handlers=[InterceptHandler()], level=0, force=True) - - # TODO: make configurable - logs.set_log_level_for_group( - "finecode_extension_runner.impls.file_manager", logs.LogLevel.WARNING - ) - logs.set_log_level_for_group( - "finecode_extension_runner.impls.inmemory_cache", logs.LogLevel.WARNING - ) logger.info(f"Python executable: {sys.executable}") logger.info(f"Project path: {global_state.project_dir_path}") + logger.info(f"Process id: {os.getpid()}") server = extension_runner_lsp.create_lsp_server() # asyncio.run(server.start_io_async()) diff --git a/finecode_jsonrpc/.gitignore b/finecode_jsonrpc/.gitignore new file mode 100644 index 0000000..4279fe8 --- /dev/null +++ b/finecode_jsonrpc/.gitignore @@ -0,0 +1,20 @@ +__pycache__/ +*.py[cod] +*$py.class +*.so +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +*.egg-info/ +.installed.cfg +*.egg diff --git a/finecode_jsonrpc/README.md b/finecode_jsonrpc/README.md new file mode 100644 index 0000000..93bb484 --- /dev/null +++ b/finecode_jsonrpc/README.md @@ -0,0 +1,3 @@ +# finecode_jsonrpc + +JSON-RPC client implementation for FineCode. 
diff --git a/finecode_jsonrpc/pyproject.toml b/finecode_jsonrpc/pyproject.toml new file mode 100644 index 0000000..07d2407 --- /dev/null +++ b/finecode_jsonrpc/pyproject.toml @@ -0,0 +1,24 @@ +[project] +name = "finecode_jsonrpc" +version = "0.1.0" +description = "JSON-RPC client implementation for FineCode" +authors = [{ name = "Vladyslav Hnatiuk", email = "aders1234@gmail.com" }] +readme = "README.md" +requires-python = ">=3.11, <= 3.14" +dependencies = ["loguru==0.7.*", "culsans==0.9.*", "apischema==0.19.*"] + +[dependency-groups] +dev_workspace = ["finecode==0.3.*", "finecode_dev_common_preset==0.2.*"] + +[tool.finecode] +presets = [{ source = "finecode_dev_common_preset" }] + +[tool.finecode.env.dev_workspace.dependencies] +finecode_dev_common_preset = { path = "../finecode_dev_common_preset", editable = true } +finecode = { path = "../", editable = true } +finecode_extension_runner = { path = "../finecode_extension_runner", editable = true } +finecode_extension_api = { path = "../finecode_extension_api", editable = true } +finecode_jsonrpc = { path = "./", editable = true } +finecode_builtin_handlers = { path = "../finecode_builtin_handlers", editable = true } +fine_python_lint = { path = "../presets/fine_python_lint", editable = true } +fine_python_format = { path = "../presets/fine_python_format", editable = true } diff --git a/finecode_jsonrpc/setup.py b/finecode_jsonrpc/setup.py new file mode 100644 index 0000000..841816a --- /dev/null +++ b/finecode_jsonrpc/setup.py @@ -0,0 +1,8 @@ +from setuptools import setup, find_packages + +setup( + name="finecode_jsonrpc", + package_dir={"": "src"}, + packages=find_packages(where="src"), + python_requires=">=3.11", +) diff --git a/src/finecode/runner/jsonrpc_client/__init__.py b/finecode_jsonrpc/src/finecode_jsonrpc/__init__.py similarity index 86% rename from src/finecode/runner/jsonrpc_client/__init__.py rename to finecode_jsonrpc/src/finecode_jsonrpc/__init__.py index 39a7caa..ee8700a 100644 --- 
a/src/finecode/runner/jsonrpc_client/__init__.py +++ b/finecode_jsonrpc/src/finecode_jsonrpc/__init__.py @@ -1,5 +1,4 @@ from .client import ( - create_lsp_client_io, JsonRpcClient, BaseRunnerRequestException, NoResponse, @@ -10,7 +9,6 @@ __all__ = [ - "create_lsp_client_io", "JsonRpcClient", "BaseRunnerRequestException", "NoResponse", diff --git a/src/finecode/runner/jsonrpc_client/_io_thread.py b/finecode_jsonrpc/src/finecode_jsonrpc/_io_thread.py similarity index 100% rename from src/finecode/runner/jsonrpc_client/_io_thread.py rename to finecode_jsonrpc/src/finecode_jsonrpc/_io_thread.py diff --git a/src/finecode/runner/jsonrpc_client/client.py b/finecode_jsonrpc/src/finecode_jsonrpc/client.py similarity index 78% rename from src/finecode/runner/jsonrpc_client/client.py rename to finecode_jsonrpc/src/finecode_jsonrpc/client.py index ea53ceb..38a9d9d 100644 --- a/src/finecode/runner/jsonrpc_client/client.py +++ b/finecode_jsonrpc/src/finecode_jsonrpc/client.py @@ -2,10 +2,10 @@ import traceback +import enum import dataclasses import functools import os -import shlex import subprocess import sys from pathlib import Path @@ -20,7 +20,7 @@ import culsans import apischema -from finecode.runner.jsonrpc_client import _io_thread +from finecode_jsonrpc import _io_thread from loguru import logger @@ -108,14 +108,23 @@ def __init__(self, error: ResponseError) -> None: self.error = error +class CommunicationType(enum.Enum): + TCP = enum.auto() + STDIO = enum.auto() + + def task_done_log_callback(future: asyncio.Future[typing.Any], task_id: str = ""): if future.cancelled(): logger.debug(f"task cancelled: {task_id}") else: exc = future.exception() if exc is not None: - logger.error(f"exception in task: {task_id}") + tb = "".join(traceback.format_exception(type(exc), exc, exc.__traceback__)) + logger.error( + f"exception in task: {task_id} {type(exc)} {exc.message if hasattr(exc, 'message') else ''}" + ) logger.exception(exc) + logger.error(tb) else: logger.trace(f"{task_id} 
done") @@ -131,7 +140,7 @@ class JsonRpcClient: CONTENT_TYPE: typing.Final[str] = "application/vscode-jsonrpc" VERSION: typing.Final[str] = "2.0" - def __init__(self, message_types: dict[str, typing.Any], readable_id: str) -> None: + def __init__(self, message_types: dict[str, typing.Any], readable_id: str, communication_type: CommunicationType = CommunicationType.TCP) -> None: self.server_process_stopped: typing.Final = threading.Event() self.server_exit_callback: ( collections.abc.Callable[[], collections.abc.Coroutine] | None @@ -141,6 +150,7 @@ def __init__(self, message_types: dict[str, typing.Any], readable_id: str) -> No self.writer = WriterFromQueue(out_queue=self.out_message_queue.sync_q) self.message_types = message_types self.readable_id: str = readable_id + self.communication_type = communication_type self._async_tasks: list[asyncio.Task[typing.Any]] = [] self._stop_event: typing.Final = threading.Event() @@ -149,27 +159,51 @@ def __init__(self, message_types: dict[str, typing.Any], readable_id: str) -> No self._expected_result_type_by_msg_id: dict[str, typing.Any] = {} self.feature_impls: dict[str, collections.abc.Callable] = {} + + # NOTE: reader and writer can be accessed only in IO thread + self._reader: asyncio.StreamReader | None = None + self._writer: asyncio.StreamWriter | None = None + self._async_tasks_in_io_thread: list[asyncio.Task[typing.Any]] = [] + self._tcp_port_future: asyncio.Future[int] | None = None def feature(self, name: str, impl: collections.abc.Callable) -> None: self.feature_impls[name] = impl - async def start_io( - self, cmd: str, io_thread: _io_thread.AsyncIOThread, *args, **kwargs - ): - """Start the given server and communicate with it over stdio.""" - full_cmd = shlex.join([cmd, *args]) + async def start( + self, + server_cmd: str, + working_dir_path: Path, + io_thread: _io_thread.AsyncIOThread, + debug_port_future: concurrent.futures.Future[int] | None, + connect: bool = True + ) -> None: + old_working_dir = os.getcwd() 
+ os.chdir(working_dir_path) + + # temporary remove VIRTUAL_ENV env variable to avoid starting in wrong venv + old_virtual_env_var = os.environ.pop("VIRTUAL_ENV", None) + try: + await self._start_server(full_cmd=server_cmd, io_thread=io_thread, debug_port_future=debug_port_future) + if connect: + await self.connect_to_server(io_thread=io_thread) + finally: + if old_virtual_env_var is not None: + os.environ["VIRTUAL_ENV"] = old_virtual_env_var + + os.chdir(old_working_dir) # restore original working directory + + async def _start_server(self, full_cmd: str, io_thread: _io_thread.AsyncIOThread, debug_port_future: concurrent.futures.Future[int] | None) -> None: server_future = io_thread.run_coroutine( start_server( - full_cmd, - kwargs, - self.in_message_queue, - self.out_message_queue, - request_futures=self._sync_request_futures, - result_types=self._expected_result_type_by_msg_id, + cmd=full_cmd, + communication_type=self.communication_type, + out_message_queue=self.out_message_queue, stop_event=self._stop_event, server_stopped_event=self.server_process_stopped, server_id=self.readable_id, + async_tasks=self._async_tasks_in_io_thread, + debug_port_future=debug_port_future ) ) @@ -186,26 +220,47 @@ async def start_io( # there are no active tasks yet, no need to stop, just interrupt starting # the server raise server_start_exception - - message_processor_task = asyncio.create_task(self.process_incoming_messages()) - message_processor_task.add_done_callback( + + self._reader, self._writer, self._tcp_port_future = server_future.result() + + notify_exit = asyncio.create_task(self._server_process_stop_handler()) + notify_exit.add_done_callback( functools.partial( - task_done_log_callback, - task_id=f"process_incoming_messages|{self.readable_id}", + task_done_log_callback, task_id=f"notify_exit|{self.readable_id}" ) ) - notify_exit = asyncio.create_task(self.server_process_stop_handler()) - notify_exit.add_done_callback( + self._async_tasks.extend([notify_exit]) + 
logger.debug(f"End of start for {full_cmd}") + + async def connect_to_server(self, io_thread: _io_thread.AsyncIOThread, timeout: float | None = 30): + connect_to_server_future = io_thread.run_coroutine( + self._connect_to_server_io(timeout=timeout) + ) + + # add done callback to catch exceptions if coroutine fails + connect_to_server_future.add_done_callback( functools.partial( - task_done_log_callback, task_id=f"notify_exit|{self.readable_id}" + task_done_log_callback, task_id=f"connect_to_server_future|{self.readable_id}" ) ) - self._async_tasks.extend([message_processor_task, notify_exit]) - logger.debug(f"End of start io for {cmd}") + await asyncio.wrap_future(connect_to_server_future) + connect_to_server_future_exception = connect_to_server_future.exception() + if connect_to_server_future_exception is not None: + raise connect_to_server_future_exception - async def server_process_stop_handler(self): + # message processor task ends automatically after getting QUEUE_END message, + # no need to save it in `_async_tasks` for explicit stop. 
+ message_processor_task = asyncio.create_task(self.process_incoming_messages()) + message_processor_task.add_done_callback( + functools.partial( + task_done_log_callback, + task_id=f"process_incoming_messages|{self.readable_id}", + ) + ) + + async def _server_process_stop_handler(self): """Cleanup handler that runs when the server process managed by the client exits""" # await asyncio.to_thread(self.server_process_stopped.wait) @@ -307,7 +362,7 @@ def notify(self, method: str, params: typing.Any | None = None) -> None: raise InvalidResponse( f"Failed to serialize notification: {error}" ) from error - + logger.trace(notification_str) self._send_data(notification_str) def send_request_sync( @@ -435,11 +490,13 @@ async def send_request( async def process_incoming_messages(self) -> None: logger.debug(f"Start processing messages from server {self.readable_id}") try: - while not self._stop_event.is_set(): + while True: raw_message = await self.in_message_queue.async_q.get() if raw_message == QUEUE_END: - logger.debug("Queue with messages from server was closed") + # TODO: this message doesn't come, task is always cancelled + logger.info("Queue with messages from server was closed") self.in_message_queue.async_q.task_done() + self.in_message_queue.async_q.shutdown() break try: @@ -449,9 +506,10 @@ async def process_incoming_messages(self) -> None: finally: self.in_message_queue.async_q.task_done() except asyncio.CancelledError: + # logger.warning("process_incoming_messages was cancelled") ... - self.in_message_queue.async_q.shutdown() + # self.in_message_queue.async_q.shutdown() logger.debug(f"End processing messages from server {self.readable_id}") async def handle_message(self, message: dict[str, typing.Any]) -> None: @@ -698,94 +756,142 @@ async def run_notification_impl(self, impl_coro) -> None: except ValueError: ... 
+ async def _connect_to_server_io(self, timeout: float | None) -> None: + if self.communication_type == CommunicationType.TCP: + assert self._tcp_port_future is not None + + try: + await asyncio.wait_for(self._tcp_port_future, timeout) + except TimeoutError as exception: + for task in self._async_tasks_in_io_thread: + task.cancel() + + raise RunnerFailedToStart(f"Didn't get port in {timeout} seconds") from exception + + port = self._tcp_port_future.result() + logger.debug(f"Got port {port} | {self.readable_id}") + + try: + self._reader, self._writer = await asyncio.open_connection("127.0.0.1", port) + except Exception as exception: + logger.exception(exception) + + for task in self._async_tasks_in_io_thread: + task.cancel() + + raise RunnerFailedToStart(f"Failed to open connection: {exception}") from exception + + assert self._reader is not None and self._writer is not None + + task = asyncio.create_task( + read_messages_from_reader( + self._reader, + self.in_message_queue.sync_q, + self._sync_request_futures, + self._expected_result_type_by_msg_id, + self._stop_event, + server_id=self.readable_id, + ) + ) + task.add_done_callback( + functools.partial( + task_done_log_callback, task_id=f"read_messages_from_reader|{self.readable_id}" + ) + ) + self._async_tasks_in_io_thread.append(task) + + task = asyncio.create_task( + send_messages_from_queue(queue=self.out_message_queue.async_q, writer=self._writer) + ) + task.add_done_callback( + functools.partial( + task_done_log_callback, task_id=f"send_messages_from_queue|{self.readable_id}" + ) + ) + self._async_tasks_in_io_thread.append(task) + async def start_server( cmd: str, - subprocess_kwargs: dict[str, str], - in_message_queue: culsans.Queue[bytes], + communication_type: CommunicationType, out_message_queue: culsans.Queue[bytes], - request_futures: dict[str, concurrent.futures.Future[typing.Any]], - result_types: dict[str, typing.Any], stop_event: threading.Event, server_stopped_event: threading.Event, server_id: str, -
logger.debug(f"Starting server process: {' '.join([cmd, str(subprocess_kwargs)])}") - - server = await asyncio.create_subprocess_shell( - cmd, - stdout=asyncio.subprocess.PIPE, - stderr=asyncio.subprocess.PIPE, - **subprocess_kwargs, - ) + async_tasks: list[asyncio.Task[typing.Any]], + debug_port_future: concurrent.futures.Future[int] | None +) -> tuple[asyncio.StreamReader | None, asyncio.StreamWriter | None, asyncio.Future[int] | None]: + logger.debug(f"Starting server process: {cmd}") + + creationflags = 0 + # start_new_session = True .. the process keeps the real parent's pid, but is + # not terminated when the parent exits + start_new_session = True + if sys.platform == "win32": + # use creationflags because `start_new_session` doesn't work on Windows + # subprocess.CREATE_NO_WINDOW .. no console window on Windows. TODO: test + creationflags = subprocess.DETACHED_PROCESS | subprocess.CREATE_NO_WINDOW + start_new_session = False + + subprocess_kwargs = { + "creationflags": creationflags, + "start_new_session": start_new_session, + } + + # Start subprocess with appropriate stdio configuration + if communication_type == CommunicationType.STDIO: + server = await asyncio.create_subprocess_shell( + cmd, + stdin=asyncio.subprocess.PIPE, + stdout=asyncio.subprocess.PIPE, + stderr=asyncio.subprocess.PIPE, + # max length of line: in STDIO mode, the whole file can be sent as a single + # line, increase default limit 64 KiB to 10 MiB + limit = 1024 * 1024 * 10, # 10 MiB, + **subprocess_kwargs, + ) + elif communication_type == CommunicationType.TCP: + server = await asyncio.create_subprocess_shell( + cmd, + stdout=asyncio.subprocess.PIPE, + stderr=asyncio.subprocess.PIPE, + **subprocess_kwargs, + ) + else: + raise ValueError(f"Unsupported communication type: {communication_type}") + logger.debug(f"{server_id} - process id: {server.pid}") - tasks: list[asyncio.Task[typing.Any]] = [] task = asyncio.create_task(log_stderr(server.stderr, stop_event)) task.add_done_callback(
functools.partial(task_done_log_callback, task_id=f"log_stderr|{server_id}") ) - tasks.append(task) - - port_future: asyncio.Future[int] = asyncio.Future() - task = asyncio.create_task( - read_stdout(server.stdout, stop_event, port_future, server.pid) - ) - task.add_done_callback( - functools.partial(task_done_log_callback, task_id=f"read_stdout|{server_id}") - ) - tasks.append(task) - - logger.debug(f"Wait for port of {server.pid} | {server_id}") - - try: - await asyncio.wait_for(port_future, 15) - except TimeoutError: - raise RunnerFailedToStart("Didn't get port in 15 seconds") - - port = port_future.result() - logger.debug(f"Got port {port} of {server.pid} | {server_id}") - - try: - reader, writer = await asyncio.open_connection("127.0.0.1", port) - except Exception as exc: - logger.exception(exc) - - for task in tasks: - task.cancel() - - raise exc - - task = asyncio.create_task( - read_messages_from_reader( - reader, - in_message_queue.sync_q, - request_futures, - result_types, - stop_event, - server.pid, + async_tasks.append(task) + + # Get reader and writer based on communication type + if communication_type == CommunicationType.STDIO: + reader = server.stdout + writer = server.stdin + tcp_port_future = None + else: # CommunicationType.TCP + reader = None + writer = None + + # TODO: read debug port also in stdio + tcp_port_future = asyncio.Future[int]() + task = asyncio.create_task( + read_stdout(server.stdout, stop_event, tcp_port_future, server.pid, debug_port_future) ) - ) - task.add_done_callback( - functools.partial( - task_done_log_callback, task_id=f"read_messages_from_reader|{server_id}" + task.add_done_callback( + functools.partial(task_done_log_callback, task_id=f"read_stdout|{server_id}") ) - ) - tasks.append(task) + async_tasks.append(task) - task = asyncio.create_task( - send_messages_from_queue(queue=out_message_queue.async_q, writer=writer) - ) - task.add_done_callback( - functools.partial( - task_done_log_callback, 
task_id=f"send_messages_from_queue|{server_id}" - ) - ) - tasks.append(task) + logger.debug(f"Wait for port of {server.pid} | {server_id}") task = asyncio.create_task( wait_for_stop_event_and_clean( - stop_event, server, tasks, server_stopped_event, out_message_queue.async_q + stop_event, server, async_tasks, server_stopped_event, out_message_queue.async_q ) ) task.add_done_callback( @@ -794,7 +900,9 @@ async def start_server( ) ) - logger.debug(f"Server {server.pid} started | {server_id}") + logger.debug(f"Server {server.pid} started with {communication_type.name} | {server_id}") + + return (reader, writer, tcp_port_future) async def wait_for_stop_event_and_clean( @@ -855,6 +963,7 @@ async def read_stdout( stop_event: threading.Event, port_future: asyncio.Future[int], server_pid: int, + debug_port_future: concurrent.futures.Future[int] | None ) -> None: logger.debug(f"Start reading logs from stdout | {server_pid}") try: @@ -871,12 +980,23 @@ async def read_stdout( match = re.search(rb"Serving on \('[\d.]+', (\d+)\)", line) if match: port = int(match.group(1)) - port_future.set_result(port) + if not port_future.done(): + port_future.set_result(port) + elif b"Debug session:" in line: + match = re.search(rb"Debug session: [\d.]+:(\d+)", line) + if match: + port = int(match.group(1)) + if debug_port_future is not None and not debug_port_future.done(): + debug_port_future.set_result(port) # logger.debug( # f"Server {server_pid} stdout: {line.decode('utf-8', errors='replace').rstrip()}" # ) except asyncio.CancelledError: pass + # except Exception as exception: + # # catch all unexpected exception to log them properly and to get explicit log + # # about end of reading + # logger.exception(exception) logger.debug(f"End reading logs from stdout | {server_pid}") @@ -911,7 +1031,7 @@ async def read_messages_from_reader( request_futures: dict[str, concurrent.futures.Future[typing.Any]], result_types: dict[str, typing.Any], stop_event: threading.Event, - server_pid: int, + 
server_id: str, ) -> None: content_length = 0 @@ -921,18 +1041,18 @@ async def read_messages_from_reader( try: header = await reader.readline() except ValueError: - logger.error(f"Value error in readline of {server_pid}") + logger.error(f"Value error in readline of {server_id}") continue except ConnectionResetError: logger.warning( - f"Server {server_pid} closed the connection(ConnectionResetError), stop the client" + f"Server {server_id} closed the connection(ConnectionResetError), stop the client" ) stop_event.set() break if not header: if reader.at_eof(): - logger.debug(f"Reader reached EOF | {server_pid}") + logger.debug(f"Reader reached EOF | {server_id}") break continue @@ -941,10 +1061,10 @@ async def read_messages_from_reader( match = CONTENT_LENGTH_PATTERN.fullmatch(header) if match: content_length = int(match.group(1)) - logger.debug(f"Content length | {server_pid}: {content_length}") + logger.debug(f"Content length | {server_id}: {content_length}") else: logger.debug( - f"Not matched content length: {header} | {server_pid}" + f"Not matched content length: {header} | {server_id}" ) # Check if all headers have been read (as indicated by an empty line \r\n) @@ -955,13 +1075,13 @@ async def read_messages_from_reader( body = await reader.readexactly(content_length) except asyncio.IncompleteReadError as error: logger.debug( - f"Incomplete read error: {error} | {server_pid} : {error.partial}" + f"Incomplete read error: {error} | {server_id} : {error.partial}" ) content_length = 0 continue except ConnectionResetError: logger.warning( - f"Server {server_pid} closed the connection(ConnectionResetError), stop the client" + f"Server {server_id} closed the connection(ConnectionResetError), stop the client" ) stop_event.set() break @@ -970,12 +1090,12 @@ async def read_messages_from_reader( content_length = 0 continue - logger.debug(f"Got content {server_pid}: {body}") + logger.debug(f"Got content {server_id}: {body}") try: message = json.loads(body) except 
json.JSONDecodeError as exc: logger.error( - f"Failed to parse JSON message: {exc} | {server_pid}" + f"Failed to parse JSON message: {exc} | {server_id}" ) continue finally: @@ -990,7 +1110,7 @@ async def read_messages_from_reader( continue if message["jsonrpc"] != JsonRpcClient.VERSION: - logger.warning(f'Unknown message "{message}" | {server_pid}') + logger.warning(f'Unknown message "{message}" | {server_id}') continue # error should be also handled here @@ -1001,7 +1121,7 @@ async def read_messages_from_reader( ) if is_response: - logger.debug(f"Response message received. | {server_pid}") + logger.debug(f"Response message received. | {server_id}") msg_id = message["id"] raw_result = message.get("result", None) future = request_futures.pop(msg_id, None) @@ -1028,7 +1148,7 @@ async def read_messages_from_reader( continue logger.debug( - f'Received result for message "{msg_id}" | {server_pid}' + f'Received result for message "{msg_id}" | {server_id}' ) if not future.cancelled(): future.set_result(result) @@ -1042,59 +1162,19 @@ async def read_messages_from_reader( b"Content-Length:" ) and not header.startswith(b"Content-Type:"): logger.debug( - f'Something is wrong: {content_length} "{header}" {not header.strip()} | {server_pid}' + f'Something is wrong: {content_length} "{header}" {not header.strip()} | {server_id}' ) except Exception as exc: logger.exception( - f"Exception in message reader loop | {server_pid}: {exc}" + f"Exception in message reader loop | {server_id}: {exc}" ) # Reset state to avoid infinite loop on persistent errors content_length = 0 except asyncio.CancelledError: ... 
- logger.debug(f"End reading messages from reader | {server_pid}") - - -async def create_lsp_client_io( - server_cmd: str, - working_dir_path: Path, - message_types: dict[str, typing.Any], - io_thread: _io_thread.AsyncIOThread, - readable_id: str, -) -> JsonRpcClient: - ls = JsonRpcClient(message_types=message_types, readable_id=readable_id) - splitted_cmd = shlex.split(server_cmd) - executable, *args = splitted_cmd - - old_working_dir = os.getcwd() - os.chdir(working_dir_path) - - # temporary remove VIRTUAL_ENV env variable to avoid starting in wrong venv - old_virtual_env_var = os.environ.pop("VIRTUAL_ENV", None) - - creationflags = 0 - # start_new_session = True .. process has parent id of real parent, but is not - # ended if parent was ended - start_new_session = True - if sys.platform == "win32": - # use creationflags because `start_new_session` doesn't work on Windows - # subprocess.CREATE_NO_WINDOW .. no console window on Windows. TODO: test - creationflags = subprocess.DETACHED_PROCESS | subprocess.CREATE_NO_WINDOW - start_new_session = False - - await ls.start_io( - executable, - io_thread, - *args, - start_new_session=start_new_session, - creationflags=creationflags, - ) - if old_virtual_env_var is not None: - os.environ["VIRTUAL_ENV"] = old_virtual_env_var - - os.chdir(old_working_dir) # restore original working directory - return ls + message_queue.put_nowait(QUEUE_END) + logger.debug(f"End reading messages from reader | {server_id}") -__all__ = ["create_lsp_client_io", "JsonRpcClient"] +__all__ = ["JsonRpcClient"] diff --git a/finecode_jsonrpc/src/finecode_jsonrpc/py.typed b/finecode_jsonrpc/src/finecode_jsonrpc/py.typed new file mode 100644 index 0000000..e69de29 diff --git a/presets/fine_python_format/fine_python_format/preset.toml b/presets/fine_python_format/fine_python_format/preset.toml index 685024c..29621e2 100644 --- a/presets/fine_python_format/fine_python_format/preset.toml +++ b/presets/fine_python_format/fine_python_format/preset.toml @@ 
-1,10 +1,10 @@ -[tool.finecode.action.format] -source = "finecode_extension_api.actions.format.FormatAction" +[tool.finecode.action.format_files_python] +source = "finecode_extension_api.actions.format_files.FormatFilesAction" handlers = [ - { name = "ruff", source = "fine_python_ruff.RuffFormatHandler", env = "dev_no_runtime", dependencies = [ + { name = "ruff", source = "fine_python_ruff.RuffFormatFilesHandler", env = "dev_no_runtime", dependencies = [ "fine_python_ruff==0.1.*", ] }, - { name = "save", source = "finecode_extension_api.actions.format.SaveFormatHandler", env = "dev_no_runtime", dependencies = [ + { name = "save", source = "finecode_extension_api.actions.format_files.SaveFormatFilesHandler", env = "dev_no_runtime", dependencies = [ "finecode_extension_api==0.3.*", ] }, ] diff --git a/presets/fine_python_lint/fine_python_lint/preset.toml b/presets/fine_python_lint/fine_python_lint/preset.toml index 2bf41e3..9d7a5af 100644 --- a/presets/fine_python_lint/fine_python_lint/preset.toml +++ b/presets/fine_python_lint/fine_python_lint/preset.toml @@ -1,26 +1,26 @@ -[tool.finecode.action.lint] -source = "finecode_extension_api.actions.lint.LintAction" +[tool.finecode.action.lint_files_python] +source = "finecode_extension_api.actions.lint_files.LintFilesAction" handlers = [ - { name = "ruff", source = "fine_python_ruff.RuffLintHandler", env = "dev_no_runtime", dependencies = [ + { name = "ruff", source = "fine_python_ruff.RuffLintFilesHandler", env = "dev_no_runtime", dependencies = [ "fine_python_ruff==0.1.*", ] }, - { name = "flake8", source = "fine_python_flake8.Flake8LintHandler", env = "dev_no_runtime", dependencies = [ + { name = "flake8", source = "fine_python_flake8.Flake8LintFilesHandler", env = "dev_no_runtime", dependencies = [ "fine_python_flake8==0.2.*", ] }, - { name = "pyrefly", source = "fine_python_pyrefly.PyreflyLintHandler", env = "dev_no_runtime", dependencies = [ + { name = "pyrefly", source = 
"fine_python_pyrefly.PyreflyLintFilesHandler", env = "dev_no_runtime", dependencies = [ "fine_python_pyrefly==0.1.*", ] }, ] [[tool.finecode.action_handler]] -source = "fine_python_ruff.RuffLintHandler" +source = "fine_python_ruff.RuffLintFilesHandler" config.extend_select = ["B", "I"] # flake8 is used only for custom rules, all standard rules are checked by ruff, but # keep flake8 configuration if someone activates some rules or uses flake8 config # parameters in their own rules [[tool.finecode.action_handler]] -source = "fine_python_flake8.Flake8LintHandler" +source = "fine_python_flake8.Flake8LintFilesHandler" config.max_line_length = 80 # W391 is not compatible with black(and ruff formatter, which is compatible with black), # because black adds an empty line to the end of the file diff --git a/pyproject.toml b/pyproject.toml index c2155f4..e9a5d2f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -11,10 +11,11 @@ dependencies = [ "watchdog==4.0.*", "click==8.1.*", "pydantic==2.11.*", - "pygls==2.0.0-a6", + "pygls==2.0.0", "finecode_extension_api==0.3.*", "finecode_extension_runner==0.3.*", "finecode_builtin_handlers==0.1.*", + "finecode_jsonrpc==0.1.*", "ordered-set==4.1.*", "mcp==1.13.*", "fine_python_virtualenv==0.1.*", @@ -47,6 +48,12 @@ presets = [{ source = "finecode_dev_common_preset" }] finecode_dev_common_preset = { path = "./finecode_dev_common_preset", editable = true } finecode_extension_runner = { path = "./finecode_extension_runner", editable = true } finecode_extension_api = { path = "./finecode_extension_api", editable = true } +finecode_jsonrpc = { path = "./finecode_jsonrpc", editable = true } +fine_python_lint = { path = "./presets/fine_python_lint", editable = true } +fine_python_format = { path = "./presets/fine_python_format", editable = true } + +[tool.finecode.env.runtime.dependencies] +finecode_jsonrpc = { path = "./finecode_jsonrpc", editable = true } [tool.importlinter] root_package = "finecode" diff --git 
a/src/finecode/base_config.toml b/src/finecode/base_config.toml index 0c59fd4..f6b533e 100644 --- a/src/finecode/base_config.toml +++ b/src/finecode/base_config.toml @@ -101,3 +101,31 @@ name = "list_project_files_by_lang_python" source = "fine_python_package_info.ListProjectFilesByLangPythonHandler" env = "dev_no_runtime" dependencies = ["fine_python_package_info==0.1.*"] + +[tool.finecode.action.group_project_files_by_lang] +source = "finecode_extension_api.actions.group_project_files_by_lang.GroupProjectFilesByLangAction" + +[[tool.finecode.action.group_project_files_by_lang.handlers]] +name = "group_project_files_by_lang_python" +source = "fine_python_package_info.GroupProjectFilesByLangPythonHandler" +env = "dev_no_runtime" +dependencies = ["fine_python_package_info==0.1.*"] + + +[tool.finecode.action.lint] +source = "finecode_extension_api.actions.lint.LintAction" + +[[tool.finecode.action.lint.handlers]] +name = "lint" +source = "finecode_builtin_handlers.LintHandler" +env = "dev_no_runtime" +dependencies = ["finecode_builtin_handlers==0.1.*"] + +[tool.finecode.action.format] +source = "finecode_extension_api.actions.format.FormatAction" + +[[tool.finecode.action.format.handlers]] +name = "format" +source = "finecode_builtin_handlers.FormatHandler" +env = "dev_no_runtime" +dependencies = ["finecode_builtin_handlers==0.1.*"] diff --git a/src/finecode/cli_app/commands/dump_config_cmd.py b/src/finecode/cli_app/commands/dump_config_cmd.py index 93fb593..6ef81eb 100644 --- a/src/finecode/cli_app/commands/dump_config_cmd.py +++ b/src/finecode/cli_app/commands/dump_config_cmd.py @@ -38,7 +38,7 @@ async def dump_config(workdir_path: pathlib.Path, project_name: str): except config_models.ConfigurationError as exception: raise DumpFailed( f"Reading project configs(without presets) in {project.dir_path} failed: {exception.message}" - ) + ) from exception # Some tools like IDE extensions for syntax highlighting rely on # file name. 
Keep file name of config the same and save in subdirectory @@ -59,7 +59,7 @@ async def dump_config(workdir_path: pathlib.Path, project_name: str): except runner_manager.RunnerFailedToStart as exception: raise DumpFailed( f"Starting runners with presets failed: {exception.message}" - ) + ) from exception try: await run_service.start_required_environments( @@ -68,7 +68,7 @@ async def dump_config(workdir_path: pathlib.Path, project_name: str): except run_service.StartingEnvironmentsFailed as exception: raise DumpFailed( f"Failed to start environments for running 'dump_config': {exception.message}" - ) + ) from exception project_raw_config = ws_context.ws_projects_raw_configs[project_dir_path] @@ -83,6 +83,8 @@ async def dump_config(workdir_path: pathlib.Path, project_name: str): ws_context=ws_context, result_format=run_service.RunResultFormat.STRING, preprocess_payload=False, + run_trigger=run_service.RunActionTrigger.USER, + dev_env=run_service.DevEnv.CLI, ) logger.info(f"Dumped config into {dump_file_path}") finally: diff --git a/src/finecode/cli_app/commands/prepare_envs_cmd.py b/src/finecode/cli_app/commands/prepare_envs_cmd.py index bf749c4..9ed92b5 100644 --- a/src/finecode/cli_app/commands/prepare_envs_cmd.py +++ b/src/finecode/cli_app/commands/prepare_envs_cmd.py @@ -59,7 +59,7 @@ async def prepare_envs(workdir_path: pathlib.Path, recreate: bool) -> None: except config_models.ConfigurationError as exception: raise PrepareEnvsFailed( f"Reading project config and collecting actions in {project.dir_path} failed: {exception.message}" - ) + ) from exception try: # try to start runner in 'dev_workspace' env of each project. 
If venv doesn't @@ -83,7 +83,7 @@ async def prepare_envs(workdir_path: pathlib.Path, recreate: bool) -> None: except runner_manager.RunnerFailedToStart as exception: raise PrepareEnvsFailed( f"Starting runners with presets failed: {exception.message}" - ) + ) from exception # now all 'dev_workspace' envs are valid, run 'prepare_runners' in them to create # venvs and install runners and presets in them @@ -107,7 +107,12 @@ async def prepare_envs(workdir_path: pathlib.Path, recreate: bool) -> None: result_output, result_return_code, ) = await utils.run_actions_in_projects_and_concat_results( - actions_by_projects, action_payload, ws_context, concurrently=True + actions_by_projects, + action_payload, + ws_context, + concurrently=True, + run_trigger=run_service.RunActionTrigger.USER, + dev_env=run_service.DevEnv.CLI, ) except run_service.ActionRunFailed as error: logger.error(error.message) @@ -128,7 +133,12 @@ async def prepare_envs(workdir_path: pathlib.Path, recreate: bool) -> None: result_output, result_return_code, ) = await utils.run_actions_in_projects_and_concat_results( - actions_by_projects, action_payload, ws_context, concurrently=True + actions_by_projects, + action_payload, + ws_context, + concurrently=True, + run_trigger=run_service.RunActionTrigger.USER, + dev_env=run_service.DevEnv.CLI, ) except run_service.ActionRunFailed as error: logger.error(error.message) @@ -192,13 +202,11 @@ async def check_or_recreate_all_dev_workspace_envs( current_project_dir_path = ws_context.ws_dirs_paths[0] current_project = ws_context.ws_projects[current_project_dir_path] try: - await runner_manager.start_runner( - project_def=current_project, env_name="dev_workspace", ws_context=ws_context - ) + await runner_manager._start_dev_workspace_runner(project_def=current_project, ws_context=ws_context) except runner_manager.RunnerFailedToStart as exception: raise PrepareEnvsFailed( f"Failed to start `dev_workspace` runner in {current_project.name}: {exception.message}" - ) + ) 
from exception envs = [] @@ -253,11 +261,13 @@ async def check_or_recreate_all_dev_workspace_envs( ws_context=ws_context, result_format=run_service.RunResultFormat.STRING, preprocess_payload=False, + run_trigger=run_service.RunActionTrigger.USER, + dev_env=run_service.DevEnv.CLI, ) except run_service.ActionRunFailed as exception: raise PrepareEnvsFailed( f"'prepare_dev_workspaces_env' failed in {current_project.name}: {exception.message}" - ) + ) from exception if action_result.return_code != 0: raise PrepareEnvsFailed( diff --git a/src/finecode/cli_app/commands/run_cmd.py b/src/finecode/cli_app/commands/run_cmd.py index de205fd..81eaf52 100644 --- a/src/finecode/cli_app/commands/run_cmd.py +++ b/src/finecode/cli_app/commands/run_cmd.py @@ -108,7 +108,7 @@ async def run_actions( except config_models.ConfigurationError as exception: raise RunFailed( f"Reading project config and collecting actions in {project.dir_path} failed: {exception.message}" - ) + ) from exception try: # 1. Start runners with presets to be able to resolve presets. Presets are @@ -118,8 +118,8 @@ async def run_actions( except runner_manager.RunnerFailedToStart as exception: raise RunFailed( f"One or more projects are misconfigured, runners for them didn't" - f" start: {exception.message}. Check logs for details." - ) + + f" start: {exception.message}. Check logs for details." 
+ ) from exception except Exception as exception: logger.error("Unexpected exception:") logger.exception(exception) @@ -153,14 +153,21 @@ async def run_actions( except run_service.StartingEnvironmentsFailed as exception: raise RunFailed( f"Failed to start environments for running actions: {exception.message}" - ) + ) from exception try: return await utils.run_actions_in_projects_and_concat_results( - actions_by_projects, action_payload, ws_context, concurrently + actions_by_projects, + action_payload, + ws_context, + concurrently, + run_trigger=run_service.RunActionTrigger.USER, + dev_env=run_service.DevEnv.CLI, ) except run_service.ActionRunFailed as exception: - raise RunFailed(f"Failed to run actions: {exception.message}") + raise RunFailed( + f"Failed to run actions: {exception.message}" + ) from exception finally: shutdown_service.on_shutdown(ws_context) @@ -178,10 +185,10 @@ def get_projects_by_names( for project in ws_context.ws_projects.values() if project.name == project_name ) - except StopIteration: + except StopIteration as exception: raise RunFailed( f"Project '{projects_names}' not found in working directory '{workdir_path}'" - ) + ) from exception projects.append(project) return projects diff --git a/src/finecode/cli_app/utils.py b/src/finecode/cli_app/utils.py index bd3dcaa..e734998 100644 --- a/src/finecode/cli_app/utils.py +++ b/src/finecode/cli_app/utils.py @@ -81,6 +81,8 @@ async def run_actions_in_projects_and_concat_results( action_payload: dict[str, str], ws_context: context.WorkspaceContext, concurrently: bool, + run_trigger: run_service.RunActionTrigger, + dev_env: run_service.DevEnv, ) -> tuple[str, int]: result_by_project = await run_service.run_actions_in_projects( actions_by_project=actions_by_project, @@ -88,6 +90,8 @@ async def run_actions_in_projects_and_concat_results( ws_context=ws_context, concurrently=concurrently, result_format=run_service.RunResultFormat.STRING, + run_trigger=run_trigger, + dev_env=dev_env ) result_output: str 
= "" diff --git a/src/finecode/context.py b/src/finecode/context.py index 7bd0914..a379b46 100644 --- a/src/finecode/context.py +++ b/src/finecode/context.py @@ -8,7 +8,7 @@ if TYPE_CHECKING: from finecode.runner.runner_client import ExtensionRunnerInfo - from finecode.runner._io_thread import AsyncIOThread + from finecode_jsonrpc._io_thread import AsyncIOThread @dataclass @@ -25,10 +25,12 @@ class WorkspaceContext: default_factory=dict ) runner_io_thread: AsyncIOThread | None = None - ignore_watch_paths: set[Path] = field(default_factory=set) - # we save list of meta and pygls manages content of documents automatically. - # They can be accessed using `ls.workspace.get_text_document()` function + # LSP doesn't provide endpoint to get opened files on client. The server should + # listen to didOpen and didClose events and manage state by itself. In this + # dictionary meta info of opened document is stored to be able to provide opened files + # to ERs in case of their restart. + # TODO: move in LSP server opened_documents: dict[str, domain.TextDocumentInfo] = field(default_factory=dict) # cache diff --git a/src/finecode/domain.py b/src/finecode/domain.py index 436774a..75c9d34 100644 --- a/src/finecode/domain.py +++ b/src/finecode/domain.py @@ -134,7 +134,7 @@ def __init__(self, runner_config: RunnerConfig) -> None: class TextDocumentInfo: - def __init__(self, uri: str, version: str) -> None: + def __init__(self, uri: str, version: str | int) -> None: self.uri = uri self.version = version diff --git a/src/finecode/logger_utils.py b/src/finecode/logger_utils.py index 4aedf7a..66fa883 100644 --- a/src/finecode/logger_utils.py +++ b/src/finecode/logger_utils.py @@ -19,9 +19,10 @@ def init_logger(trace: bool, stdout: bool = False): activation=[ ("pygls.protocol.json_rpc", False), ("pygls.feature_manager", False), - ("pygls.io_", False), + # ("pygls.io_", False), ] ) + logs.set_log_level_for_group(group="finecode_jsonrpc.client", level=logs.LogLevel.INFO) 
logs.save_logs_to_file( file_path=logs_dir_path / "workspace_manager.log", log_level="TRACE" if trace else "INFO", diff --git a/src/finecode/lsp_server/api.proto b/src/finecode/lsp_server/api.proto deleted file mode 100644 index 0f594e9..0000000 --- a/src/finecode/lsp_server/api.proto +++ /dev/null @@ -1,64 +0,0 @@ -syntax = "proto3"; - -package finecode; - -message AddWorkspaceDirRequest { - string dir_path = 1; -} - -message AddWorkspaceDirResponse { -} - -message DeleteWorkspaceDirRequest { - string dir_path = 1; -} - -message DeleteWorkspaceDirResponse { - -} - -message ListActionsRequest { - string parent_node_id = 1; -} - -message ActionTreeNode { - string node_id = 1; - string name = 2; - NodeType node_type = 3; - repeated ActionTreeNode subnodes = 4; - - enum NodeType { - DIRECTORY = 0; - PACKAGE = 1; - ACTION = 2; - PRESET = 3; - } -} - -message ListActionsResponse { - repeated ActionTreeNode nodes = 1; -} - -message RunActionRequest { - string action_node_id = 1; - // following cases are possible: - // 1. Running action on existing file: - // 1.1 Unsaved: both apply_on and apply_on_text are passed - // 1.2 Saved: either apply_on or both are passed - // 2. Running action on a new unsaved yet file: only apply_on_text is passed. 
- string apply_on = 2; - string apply_on_text = 3; -} - -message RunActionResponse { - // if target was passed as text and was changed, result is returned in this parameter - string result_text = 1; -} - -service WorkspaceManagerService { - rpc AddWorkspaceDir(AddWorkspaceDirRequest) returns (AddWorkspaceDirResponse); - rpc DeleteWorkspaceDir(DeleteWorkspaceDirRequest) returns (DeleteWorkspaceDirResponse); - - rpc ListActions(ListActionsRequest) returns (ListActionsResponse); - rpc RunAction(RunActionRequest) returns (RunActionResponse); -} diff --git a/src/finecode/lsp_server/endpoints/action_tree.py b/src/finecode/lsp_server/endpoints/action_tree.py index 7d77a54..0cbde88 100644 --- a/src/finecode/lsp_server/endpoints/action_tree.py +++ b/src/finecode/lsp_server/endpoints/action_tree.py @@ -17,7 +17,9 @@ async def list_actions(ls: LanguageServer, params): logger.info(f"list_actions {params}") await global_state.server_initialized.wait() - parent_node_id = params[0] + # params is expected to be a list, but pygls seems to pass the first element of list + # if the list contains only one element. 
Test after migration from pygls + parent_node_id = params # params[0] request = schemas.ListActionsRequest(parent_node_id=parent_node_id) result = await _list_actions(request=request) return result.model_dump(by_alias=True) @@ -40,6 +42,7 @@ def get_project_action_tree( actions_nodes: list[schemas.ActionTreeNode] = [] if project.status == domain.ProjectStatus.CONFIG_VALID: assert project.actions is not None + action_nodes: list[schemas.ActionTreeNode] = [] for action in project.actions: node_id = f"{project.dir_path.as_posix()}::{action.name}" handlers_nodes = [ @@ -52,7 +55,7 @@ def get_project_action_tree( ) for handler in action.handlers ] - actions_nodes.append( + action_nodes.append( schemas.ActionTreeNode( node_id=node_id, name=action.name, @@ -66,6 +69,41 @@ def get_project_action_tree( project_path=project.dir_path, action_name=action.name, ) + + node_id = f"{project.dir_path.as_posix()}::actions" + actions_nodes.append( + schemas.ActionTreeNode( + node_id=node_id, + name="Actions", + node_type=schemas.ActionTreeNode.NodeType.ACTION_GROUP, + subnodes=action_nodes, + status="", + ) + ) + + envs_nodes: list[schemas.ActionTreeNode] = [] + for env in project.envs: + node_id = f"{project.dir_path.as_posix()}::envs::{env}" + envs_nodes.append( + schemas.ActionTreeNode( + node_id=node_id, + name=env, + node_type=schemas.ActionTreeNode.NodeType.ENV, + subnodes=[], + status="", + ) + ) + + node_id = f"{project.dir_path.as_posix()}::envs" + actions_nodes.append( + schemas.ActionTreeNode( + node_id=node_id, + name="Environments", + node_type=schemas.ActionTreeNode.NodeType.ENV_GROUP, + subnodes=envs_nodes, + status="", + ) + ) else: logger.info( f"Project has no valid config and finecode: {project.dir_path}, no actions will be shown" @@ -85,19 +123,16 @@ def create_node_list_for_ws( all_projects_paths = list(ws_context.ws_projects.keys()) all_projects_paths.sort() + # use sets to assign each project path to a single workspace directory + all_projects_paths_set = 
ordered_set.OrderedSet(all_projects_paths) - while len(all_ws_dirs) > 0: - ws_dir = all_ws_dirs.pop() - projects_by_ws_dir[ws_dir] = [] - - while True: - project_path = all_projects_paths[0] - if project_path.is_relative_to(ws_dir): - projects_by_ws_dir[ws_dir].append(project_path) - all_projects_paths.pop(0) + for ws_dir in all_ws_dirs: + ws_dir_project_paths = [project_path for project_path in all_projects_paths_set if project_path.is_relative_to(ws_dir)] + projects_by_ws_dir[ws_dir] = ws_dir_project_paths + all_projects_paths_set -= ordered_set.OrderedSet(ws_dir_project_paths) - if len(all_projects_paths) == 0: - break + if len(all_projects_paths_set) > 0: + logger.warning(f"Unexpected setup: these projects {all_projects_paths_set} don't belong to any of workspace dirs: {all_ws_dirs}") # build node tree so that: # - all ws dirs are in tree either as project or directory diff --git a/src/finecode/lsp_server/endpoints/diagnostics.py b/src/finecode/lsp_server/endpoints/diagnostics.py index b1e9f2d..6a32288 100644 --- a/src/finecode/lsp_server/endpoints/diagnostics.py +++ b/src/finecode/lsp_server/endpoints/diagnostics.py @@ -12,8 +12,6 @@ from finecode import ( context, - domain, - project_analyzer, pygls_types_utils, ) from finecode.services import run_service @@ -63,10 +61,12 @@ async def document_diagnostic_with_full_result( response = await run_service.find_action_project_and_run( file_path=file_path, action_name="lint", - # TODO: use payload class params={ + "target": "files", "file_paths": [file_path], }, + run_trigger=run_service.RunActionTrigger.SYSTEM, + dev_env=run_service.DevEnv.IDE, ws_context=global_state.ws_context, ) except run_service.ActionRunFailed as error: @@ -131,6 +131,8 @@ async def document_diagnostic_with_partial_results( "file_paths": [file_path], }, partial_result_token=partial_result_token, + run_trigger=run_service.RunActionTrigger.SYSTEM, + dev_env=run_service.DevEnv.IDE, ws_context=global_state.ws_context, ) as response: # LSP defines 
that the first response should be `DocumentDiagnosticReport` @@ -249,6 +251,8 @@ async def run_workspace_diagnostic_with_partial_results( params=exec_info.request_data, partial_result_token=partial_result_token, project_dir_path=exec_info.project_dir_path, + run_trigger=run_service.RunActionTrigger.SYSTEM, + dev_env=run_service.DevEnv.IDE, ws_context=global_state.ws_context, ) as response: # use pydantic dataclass to convert dict to dataclass instance recursively @@ -314,6 +318,8 @@ async def workspace_diagnostic_with_full_result( params=exec_info.request_data, project_def=project, ws_context=ws_context, + run_trigger=run_service.RunActionTrigger.SYSTEM, + dev_env=run_service.DevEnv.IDE, preprocess_payload=False, ) ) @@ -352,48 +358,37 @@ async def _workspace_diagnostic( params: types.WorkspaceDiagnosticParams, ) -> types.WorkspaceDiagnosticReport | None: relevant_projects_paths: list[Path] = run_service.find_all_projects_with_action( - action_name="lint", ws_context=global_state.ws_context + # check lint_files, because 'lint' is builtin and exists in all projects by default + action_name="lint_files_python", + ws_context=global_state.ws_context, # TODO: correct check of name ) exec_info_by_project_dir_path: dict[Path, LintActionExecInfo] = {} + actions_by_projects: dict[Path, list[str]] = {} for project_dir_path in relevant_projects_paths: - project = global_state.ws_context.ws_projects[project_dir_path] exec_info_by_project_dir_path[project_dir_path] = LintActionExecInfo( - project_dir_path=project_dir_path, action_name="lint" + project_dir_path=project_dir_path, + action_name="lint", + request_data={"target": "project", "trigger": "system", "dev_env": "ide"}, ) - - # find which runner is responsible for which files - # currently FineCode supports only raw python files, find them in each ws project - # exclude projects without finecode - # if both parent and child projects have lint action, exclude files of chid from - # parent - # check which runners are active 
and run in them - # - # assign files to projects - files_by_projects: dict[ - Path, list[Path] - ] = await project_analyzer.get_files_by_projects( - projects_dirs_paths=relevant_projects_paths, ws_context=global_state.ws_context - ) - - for project_dir_path, files_for_runner in files_by_projects.items(): - project = global_state.ws_context.ws_projects[project_dir_path] - if project.status != domain.ProjectStatus.CONFIG_VALID: - logger.warning( - f"Project {project_dir_path} has not valid configuration and finecode," - " lint in it will not be executed" - ) - continue - - exec_info = exec_info_by_project_dir_path[project_dir_path] - if exec_info.action_name == "lint": - exec_info.request_data = { - "file_paths": [file_path.as_posix() for file_path in files_for_runner], - } + actions_by_projects[project_dir_path] = ["lint"] exec_infos = list(exec_info_by_project_dir_path.values()) run_with_partial_results: bool = params.partial_result_token is not None + # linting is resource-intensive task. First start all runners and only then begin + # linting to avoid the case, when some of runners start first, take all available + # resources and other stay blocked. Starting of environment has timeout and the + # letter fail with timeout error. 
+ try: + await run_service.start_required_environments( + actions_by_projects, global_state.ws_context + ) + except run_service.StartingEnvironmentsFailed as exception: + logger.error( + f"Failed to start required environments for running workspace diagnostic: {exception.message}" + ) + if run_with_partial_results: return await workspace_diagnostic_with_partial_results( exec_infos=exec_infos, partial_result_token=params.partial_result_token diff --git a/src/finecode/lsp_server/endpoints/document_sync.py b/src/finecode/lsp_server/endpoints/document_sync.py index d1b5081..cec258e 100644 --- a/src/finecode/lsp_server/endpoints/document_sync.py +++ b/src/finecode/lsp_server/endpoints/document_sync.py @@ -106,3 +106,57 @@ async def document_did_change( global_state.ws_context.opened_documents[ params.text_document.uri ].version = params.text_document.version + + logger.trace(f"Document did change: {params.text_document.uri}") + file_path = Path(params.text_document.uri.replace("file://", "")) + projects_paths = [ + project_path + for project_path, project in global_state.ws_context.ws_projects.items() + if project.status == domain.ProjectStatus.CONFIG_VALID + and file_path.is_relative_to(project_path) + ] + + content_changes = [] + for change in params.content_changes: + if isinstance(change, types.TextDocumentContentChangePartial): + mapped_change = runner_client.TextDocumentContentChangePartial( + range=runner_client.Range( + start=runner_client.Position(line=change.range.start.line, character=change.range.start.character), + end=runner_client.Position(line=change.range.end.line, character=change.range.end.character) + ), + text=change.text, + range_length=change.range_length + ) + content_changes.append(mapped_change) + elif isinstance(change, types.TextDocumentContentChangeWholeDocument): + mapped_change = runner_client.TextDocumentContentChangeWholeDocument(text=change.text) + content_changes.append(mapped_change) + else: + logger.error(f"Got unsupported content 
change from LSP client: {type(change)}, skip it") + continue + + change_params = runner_client.DidChangeTextDocumentParams( + text_document=runner_client.VersionedTextDocumentIdentifier(version=params.text_document.version, uri=params.text_document.uri), + content_changes=content_changes + ) + + try: + async with asyncio.TaskGroup() as tg: + for project_path in projects_paths: + runners_by_env = global_state.ws_context.ws_projects_extension_runners[ + project_path + ] + for runner in runners_by_env.values(): + if runner.status != runner_client.RunnerStatus.RUNNING: + logger.trace( + f"Runner {runner.readable_id} is not running, skip it" + ) + continue + + tg.create_task( + runner_client.notify_document_did_change( + runner=runner, change_params=change_params + ) + ) + except ExceptionGroup as e: + logger.error(f"Error while sending changed document: {e}") diff --git a/src/finecode/lsp_server/endpoints/formatting.py b/src/finecode/lsp_server/endpoints/formatting.py index e393e54..19e6c07 100644 --- a/src/finecode/lsp_server/endpoints/formatting.py +++ b/src/finecode/lsp_server/endpoints/formatting.py @@ -24,6 +24,8 @@ async def format_document(ls: LanguageServer, params: types.DocumentFormattingPa file_path=file_path, action_name="format", params={"file_paths": [file_path], "save": False}, + run_trigger=run_service.RunActionTrigger.USER, + dev_env=run_service.DevEnv.IDE, ws_context=global_state.ws_context, ) except Exception as error: # TODO diff --git a/src/finecode/lsp_server/endpoints/inlay_hints.py b/src/finecode/lsp_server/endpoints/inlay_hints.py index 0d20b66..f53a219 100644 --- a/src/finecode/lsp_server/endpoints/inlay_hints.py +++ b/src/finecode/lsp_server/endpoints/inlay_hints.py @@ -53,6 +53,8 @@ async def document_inlay_hint( file_path=file_path, action_name="text_document_inlay_hint", params=inlay_hint_params_to_dict(params), + run_trigger=run_service.RunActionTrigger.SYSTEM, + dev_env=run_service.DevEnv.IDE, ws_context=global_state.ws_context, ) except 
find_project.FileHasNotActionException: diff --git a/src/finecode/lsp_server/lsp_server.py b/src/finecode/lsp_server/lsp_server.py index 454e73f..38803cb 100644 --- a/src/finecode/lsp_server/lsp_server.py +++ b/src/finecode/lsp_server/lsp_server.py @@ -1,15 +1,17 @@ import asyncio +import collections.abc from functools import partial from pathlib import Path from typing import Any from loguru import logger from lsprotocol import types +from pygls.workspace import position_codec from pygls.lsp.server import LanguageServer from finecode_extension_runner.lsp_server import CustomLanguageServer from finecode.services import shutdown_service -from finecode.runner import runner_manager, runner_client +from finecode.runner import runner_manager from finecode.lsp_server import global_state, schemas, services from finecode.lsp_server.endpoints import action_tree as action_tree_endpoints from finecode.lsp_server.endpoints import code_actions as code_actions_endpoints @@ -20,7 +22,17 @@ from finecode.lsp_server.endpoints import inlay_hints as inlay_hints_endpoints +def position_from_client_units( + self, lines: collections.abc.Sequence[str], position: types.Position +) -> types.Position: + return position + + def create_lsp_server() -> CustomLanguageServer: + # avoid recalculating of positions by pygls + position_codec.PositionCodec.position_from_client_units = position_from_client_units + + # handle all requests explicitly because there are different types of requests: # project-specific, workspace-wide. Some Workspace-wide support partial responses, # some not. 
@@ -127,6 +139,11 @@ def create_lsp_server() -> CustomLanguageServer: "finecode.restartExtensionRunner" ) register_restart_extension_runner_cmd(restart_extension_runner) + + register_restart_and_debug_extension_runner_cmd = server.command( + "finecode.restartAndDebugExtensionRunner" + ) + register_restart_and_debug_extension_runner_cmd(restart_and_debug_extension_runner) register_shutdown_feature = server.feature(types.SHUTDOWN) register_shutdown_feature(_on_shutdown) @@ -134,35 +151,35 @@ def create_lsp_server() -> CustomLanguageServer: return server -LOG_LEVEL_MAP = { - "DEBUG": types.MessageType.Debug, - "INFO": types.MessageType.Info, - "SUCCESS": types.MessageType.Info, - "WARNING": types.MessageType.Warning, - "ERROR": types.MessageType.Error, - "CRITICAL": types.MessageType.Error, -} +# LOG_LEVEL_MAP = { +# "DEBUG": types.MessageType.Debug, +# "INFO": types.MessageType.Info, +# "SUCCESS": types.MessageType.Info, +# "WARNING": types.MessageType.Warning, +# "ERROR": types.MessageType.Error, +# "CRITICAL": types.MessageType.Error, +# } async def _on_initialized(ls: LanguageServer, params: types.InitializedParams): - def pass_log_to_ls_client(log) -> None: - # disabling and enabling logging of pygls package is required to avoid logging - # loop, because there are logs inside of log_trace and window_log_message - # functions - logger.disable("pygls") - if log.record["level"].no < 10: - # trace - ls.log_trace(types.LogTraceParams(message=log.record["message"])) - else: - level = LOG_LEVEL_MAP.get(log.record["level"].name, types.MessageType.Info) - ls.window_log_message( - types.LogMessageParams(type=level, message=log.record["message"]) - ) - logger.enable("pygls") - # module-specific config should be reapplied after disabling and enabling logger - # for the whole package - # TODO: unify with main - logger.configure(activation=[("pygls.protocol.json_rpc", False)]) + # def pass_log_to_ls_client(log) -> None: + # # disabling and enabling logging of pygls package is 
required to avoid logging + # # loop, because there are logs inside of log_trace and window_log_message + # # functions + # logger.disable("pygls") + # if log.record["level"].no < 10: + # # trace + # ls.log_trace(types.LogTraceParams(message=log.record["message"])) + # else: + # level = LOG_LEVEL_MAP.get(log.record["level"].name, types.MessageType.Info) + # ls.window_log_message( + # types.LogMessageParams(type=level, message=log.record["message"]) + # ) + # logger.enable("pygls") + # # module-specific config should be reapplied after disabling and enabling logger + # # for the whole package + # # TODO: unify with main + # logger.configure(activation=[("pygls.protocol.json_rpc", False)]) # loguru doesn't support passing partial with ls parameter, use nested function # instead @@ -170,33 +187,8 @@ def pass_log_to_ls_client(log) -> None: # Disabled, because it is not thread-safe and it means not compatible with IO thread # logger.add(sink=pass_log_to_ls_client) - async def get_document( - params: runner_client.GetDocumentParams, - ) -> runner_client.GetDocumentResult: - try: - doc_info = global_state.ws_context.opened_documents[params.uri] - except KeyError: - # this error can happen even if ER processes documents correctly: document - # is opened, action execution starts, user closes the document, ER is busy - # at this moment, action execution comes to reading the file before new sync - # of opened documents -> error occurs. ER is expected to be always never - # blocked, but still avoid possible error. - # - # pygls makes all exceptions on server side JsonRpcInternalError and they - # should be matched by text. 
- # Example: https://github.com/openlawlibrary/pygls/blob/main/tests/ - # lsp/test_errors.py#L108C24-L108C44 - raise Exception("Document is not opened") - - text = ls.workspace.get_text_document(params.uri).source - return runner_client.GetDocumentResult( - uri=params.uri, version=doc_info.version, text=text - ) - logger.info("initialized, adding workspace directories") - services.register_document_getter(get_document) - async def apply_workspace_edit(params): return await ls.workspace_apply_edit_async(params) @@ -216,6 +208,7 @@ def report_progress(token: str | int, value: Any): ls.progress(types.ProgressParams(token, value)) services.register_progress_reporter(report_progress) + services.register_debug_session_starter(partial(start_debug_session, ls)) try: async with asyncio.TaskGroup() as tg: @@ -256,20 +249,33 @@ def _on_shutdown(ls: LanguageServer, params): async def reset(ls: LanguageServer, params): logger.info("Reset WM") await global_state.server_initialized.wait() - ... -async def restart_extension_runner(ls: LanguageServer, params): - logger.info(f"restart extension runners {params}") +async def restart_extension_runner(ls: LanguageServer, tree_node, param2): + logger.info(f"restart extension runner {tree_node}") await global_state.server_initialized.wait() - params_dict = params[0] - runner_working_dir_str = params_dict["projectPath"] + runner_id = tree_node['projectPath'] + splitted_runner_id = runner_id.split('::') + runner_working_dir_str = splitted_runner_id[0] runner_working_dir_path = Path(runner_working_dir_str) + env_name = splitted_runner_id[-1] + + await runner_manager.restart_extension_runner(runner_working_dir_path=runner_working_dir_path, env_name=env_name, ws_context=global_state.ws_context) - await runner_manager.restart_extension_runners( - runner_working_dir_path, global_state.ws_context - ) + +async def restart_and_debug_extension_runner(ls: LanguageServer, tree_node, params2): + logger.info(f"restart and debug extension runner 
{tree_node} {params2}") + await global_state.server_initialized.wait() + + runner_id = tree_node['projectPath'] + splitted_runner_id = runner_id.split('::') + runner_working_dir_str = splitted_runner_id[0] + runner_working_dir_path = Path(runner_working_dir_str) + env_name = splitted_runner_id[-1] + + logger.info(f'start debugging {runner_working_dir_path} {runner_id} {env_name}') + await runner_manager.restart_extension_runner(runner_working_dir_path=runner_working_dir_path, env_name=env_name, ws_context=global_state.ws_context, debug=True) async def send_user_message_notification( @@ -294,4 +300,11 @@ async def send_user_message_request( ) +async def start_debug_session( + ls: LanguageServer, params +) -> None: + res = await ls.protocol.send_request_async('ide/startDebugging', params) + logger.info(f"started debugging: {res}") + + __all__ = ["create_lsp_server"] diff --git a/src/finecode/lsp_server/schemas.py b/src/finecode/lsp_server/schemas.py index 38b5860..f9d023e 100644 --- a/src/finecode/lsp_server/schemas.py +++ b/src/finecode/lsp_server/schemas.py @@ -44,7 +44,10 @@ class NodeType(IntEnum): DIRECTORY = 0 PROJECT = 1 ACTION = 2 - PRESET = 3 + ACTION_GROUP = 3 + PRESET = 4 + ENV_GROUP = 5 + ENV = 6 class ListActionsResponse(BaseModel): diff --git a/src/finecode/lsp_server/services.py b/src/finecode/lsp_server/services.py index 1af70db..a882700 100644 --- a/src/finecode/lsp_server/services.py +++ b/src/finecode/lsp_server/services.py @@ -38,14 +38,14 @@ def register_send_user_message_request_callback(send_user_message_request_callba user_messages._lsp_message_send = send_user_message_request_callback -def register_document_getter(get_document_func): - runner_manager.get_document = get_document_func - - def register_workspace_edit_applier(apply_workspace_edit_func): runner_manager.apply_workspace_edit = apply_workspace_edit_func +def register_debug_session_starter(start_debug_session_func): + runner_manager.start_debug_session = start_debug_session_func + + 
def register_progress_reporter(report_progress_func): global_state.progress_reporter = report_progress_func diff --git a/src/finecode/project_analyzer.py b/src/finecode/project_analyzer.py deleted file mode 100644 index 4578270..0000000 --- a/src/finecode/project_analyzer.py +++ /dev/null @@ -1,55 +0,0 @@ -from pathlib import Path - -from finecode import context -from finecode.services import run_service - - -class FailedToGetProjectFiles(Exception): - def __init__(self, message: str) -> None: - self.message = message - - -async def get_files_by_projects( - projects_dirs_paths: list[Path], ws_context: context.WorkspaceContext -) -> dict[Path, list[Path]]: - files_by_project_dir: dict[Path, list[Path]] = {} - actions_by_project = { - project_dir_path: ["list_project_files_by_lang"] - for project_dir_path in projects_dirs_paths - } - action_payload = {} - - try: - results_by_project = await run_service.run_actions_in_projects( - actions_by_project=actions_by_project, - action_payload=action_payload, - ws_context=ws_context, - concurrently=False, - result_format=run_service.RunResultFormat.JSON, - ) - except run_service.ActionRunFailed as exception: - # TODO: handle it overall - raise FailedToGetProjectFiles(exception.message) - - for project_dir_path, action_results in results_by_project.items(): - list_project_files_action_result = action_results["list_project_files_by_lang"] - if list_project_files_action_result.return_code != 0: - raise FailedToGetProjectFiles( - f"'list_project_files_by_lang' action ended in {project_dir_path} with return code {list_project_files_action_result.return_code}: {list_project_files_action_result.result}" - ) - project_files_by_lang = list_project_files_action_result.result - files_by_project_dir[project_dir_path] = [ - Path(file_path) - for file_path in project_files_by_lang["files_by_lang"].get("python", []) - ] - - return files_by_project_dir - - -async def get_project_files( - project_dir_path: Path, ws_context: 
context.WorkspaceContext -) -> list[Path]: - files_by_projects = await get_files_by_projects( - [project_dir_path], ws_context=ws_context - ) - return files_by_projects[project_dir_path] diff --git a/src/finecode/runner/_internal_client_api.py b/src/finecode/runner/_internal_client_api.py index 05ca77c..bb8abe1 100644 --- a/src/finecode/runner/_internal_client_api.py +++ b/src/finecode/runner/_internal_client_api.py @@ -2,11 +2,12 @@ Client API used only internally in runner manager or other modules of this package. They are not intended to be used in higher layers. """ +import pathlib from loguru import logger from finecode.runner import _internal_client_types -from finecode.runner.jsonrpc_client import client as jsonrpc_client +from finecode_jsonrpc import client as jsonrpc_client async def initialize( @@ -14,6 +15,7 @@ async def initialize( client_process_id: int, client_name: str, client_version: str, + client_workspace_dir: pathlib.Path ) -> None: logger.debug(f"Send initialize to server {client.readable_id}") await client.send_request( @@ -25,6 +27,7 @@ async def initialize( name=client_name, version=client_version ), trace=_internal_client_types.TraceValue.Verbose, + workspace_folders=[_internal_client_types.WorkspaceFolder(uri=f'file://{client_workspace_dir.as_posix()}', name=client_workspace_dir.name)] ), timeout=20, ) diff --git a/src/finecode/runner/_internal_client_types.py b/src/finecode/runner/_internal_client_types.py index 8013f55..b59843c 100644 --- a/src/finecode/runner/_internal_client_types.py +++ b/src/finecode/runner/_internal_client_types.py @@ -19,11 +19,11 @@ CANCEL_REQUEST = "$/cancelRequest" PROGRESS = "$/progress" TEXT_DOCUMENT_DID_CLOSE = "textDocument/didClose" +TEXT_DOCUMENT_DID_CHANGE = "textDocument/didChange" TEXT_DOCUMENT_DID_OPEN = "textDocument/didOpen" WORKSPACE_EXECUTE_COMMAND = "workspace/executeCommand" WORKSPACE_APPLY_EDIT = "workspace/applyEdit" -DOCUMENT_GET = "documents/get" PROJECT_RAW_CONFIG_GET = 
"projects/getRawConfig" @@ -159,6 +159,36 @@ class InitializedParams: pass +@dataclasses.dataclass +class GeneralClientCapabilities: + """General client capabilities. + + @since 3.16.0""" + + # Since: 3.16.0 + + position_encodings: collections.abc.Sequence[PositionEncodingKind | str] | None = None + """The position encodings supported by the client. Client and server + have to agree on the same position encoding to ensure that offsets + (e.g. character position in a line) are interpreted the same on both + sides. + + To keep the protocol backwards compatible the following applies: if + the value 'utf-16' is missing from the array of position encodings + servers can assume that the client supports UTF-16. UTF-16 is + therefore a mandatory encoding. + + If omitted it defaults to ['utf-16']. + + Implementation considerations: since the conversion from one encoding + into another requires the content of the file / line the conversion + is best done where the file is read which is usually on the server + side. + + @since 3.17.0""" + # Since: 3.17.0 + + @dataclasses.dataclass class ClientCapabilities: """Defines the capabilities provided by the client.""" @@ -178,7 +208,7 @@ class ClientCapabilities: # window: WindowClientCapabilities | None = None """Window specific client capabilities.""" - # general: GeneralClientCapabilities | None = None + general: GeneralClientCapabilities | None = None """General client capabilities. 
@since 3.16.0""" @@ -1339,48 +1369,93 @@ class StringValue: @dataclasses.dataclass -class GetDocumentParams: - uri: str +class GetProjectRawConfigParams: + project_def_path: str @dataclasses.dataclass -class GetDocumentRequest(BaseRequest): - params: GetDocumentParams - method = "documents/get" +class GetProjectRawConfigRequest(BaseRequest): + params: GetProjectRawConfigParams + method = "projects/getRawConfig" @dataclasses.dataclass -class GetDocumentResult(BaseResult): - uri: str - version: str - text: str +class GetProjectRawConfigResult(BaseResult): + # stringified json + config: str @dataclasses.dataclass -class GetDocumentResponse(BaseResponse): - result: GetDocumentResult +class GetProjectRawConfigResponse(BaseResponse): + result: GetProjectRawConfigResult @dataclasses.dataclass -class GetProjectRawConfigParams: - project_def_path: str +class VersionedTextDocumentIdentifier: + """A text document identifier to denote a specific version of a text document.""" + + version: int + """The version number of this document.""" + + uri: str + """The text document's uri.""" @dataclasses.dataclass -class GetProjectRawConfigRequest(BaseRequest): - params: GetProjectRawConfigParams - method = "projects/getRawConfig" +class TextDocumentContentChangePartial: + """@since 3.18.0""" + + # Since: 3.18.0 + + range: Range + """The range of the document that changed.""" + + text: str + """The new text for the provided range.""" + + range_length: int | None + """The optional length of the range that got replaced. + + @deprecated use range instead.""" @dataclasses.dataclass -class GetProjectRawConfigResult(BaseResult): - # stringified json - config: str +class TextDocumentContentChangeWholeDocument: + """@since 3.18.0""" + + # Since: 3.18.0 + + text: str + """The new text of the whole document.""" + + +TextDocumentContentChangeEvent = TextDocumentContentChangePartial | TextDocumentContentChangeWholeDocument +"""An event describing a change to a text document. 
If only a text is provided +it is considered to be the full content of the document.""" @dataclasses.dataclass -class GetProjectRawConfigResponse(BaseResponse): - result: GetProjectRawConfigResult +class DidChangeTextDocumentParams: + """The change text document notification's parameters.""" + + text_document: VersionedTextDocumentIdentifier + """The document that did change. The version number points + to the version after all provided content changes have + been applied.""" + + content_changes: collections.abc.Sequence[TextDocumentContentChangeEvent] + """The actual content changes. The content changes describe single state changes + to the document. So if there are two content changes c1 (at array index 0) and + c2 (at array index 1) for a document in state S then c1 moves the document from + S to S' and c2 from S' to S''. So c1 is computed on the state S and c2 is computed + on the state S'. + + To mirror the content of a document using change events use the following approach: + - start with the same initial content + - apply the 'textDocument/didChange' notifications in the order you receive them. 
+ - apply the `TextDocumentContentChangeEvent`s in a single notification in the order + you receive them.""" + @dataclasses.dataclass @@ -1424,6 +1499,15 @@ class DidCloseTextDocumentNotification(BaseNotification): method = "textDocument/didClose" +@dataclasses.dataclass +class DidChangeTextDocumentNotification(BaseNotification): + """The document change notification is sent from the client to the server to signal + changes to a text document.""" + + params: DidChangeTextDocumentParams + method = "textDocument/didChange" + + @dataclasses.dataclass class CancelParams: id: int | str @@ -1481,13 +1565,12 @@ class ExitNotification(BaseNotification): ApplyWorkspaceEditResponse, ApplyWorkspaceEditResult, ), - DOCUMENT_GET: ( - GetDocumentRequest, - GetDocumentParams, - GetDocumentResponse, - GetDocumentResult, + PROJECT_RAW_CONFIG_GET: ( + GetProjectRawConfigRequest, + GetProjectRawConfigParams, + GetProjectRawConfigResponse, + GetProjectRawConfigResult, ), - PROJECT_RAW_CONFIG_GET: (GetProjectRawConfigRequest, GetProjectRawConfigParams, GetProjectRawConfigResponse, GetProjectRawConfigResult), TEXT_DOCUMENT_DID_OPEN: ( DidOpenTextDocumentNotification, DidOpenTextDocumentParams, @@ -1500,4 +1583,10 @@ class ExitNotification(BaseNotification): None, None, ), + TEXT_DOCUMENT_DID_CHANGE: ( + DidChangeTextDocumentNotification, + DidChangeTextDocumentParams, + None, + None, + ), } diff --git a/src/finecode/runner/runner_client.py b/src/finecode/runner/runner_client.py index 11f88c0..40b092c 100644 --- a/src/finecode/runner/runner_client.py +++ b/src/finecode/runner/runner_client.py @@ -15,13 +15,18 @@ from loguru import logger import finecode.domain as domain -from finecode.runner import jsonrpc_client, _internal_client_types, _internal_client_api +from finecode.runner import _internal_client_types, _internal_client_api +import finecode_jsonrpc as jsonrpc_client # reexport BaseRunnerRequestException = jsonrpc_client.BaseRunnerRequestException -GetDocumentParams = 
_internal_client_types.GetDocumentParams -GetDocumentResult = _internal_client_types.GetDocumentResult +DidChangeTextDocumentParams = _internal_client_types.DidChangeTextDocumentParams +VersionedTextDocumentIdentifier = _internal_client_types.VersionedTextDocumentIdentifier +TextDocumentContentChangeWholeDocument = _internal_client_types.TextDocumentContentChangeWholeDocument +TextDocumentContentChangePartial = _internal_client_types.TextDocumentContentChangePartial +Range = _internal_client_types.Range +Position = _internal_client_types.Position class ActionRunFailed(jsonrpc_client.BaseRunnerRequestException): ... @@ -71,6 +76,20 @@ class RunResultFormat(enum.Enum): STRING = "string" +class RunActionTrigger(enum.StrEnum): + USER = 'user' + SYSTEM = 'system' + UNKNOWN = 'unknown' + + +class DevEnv(enum.StrEnum): + IDE = 'ide' + CLI = 'cli' + AI = 'ai' + PRECOMMIT = 'precommit' + CI_CD = 'cicd' + + async def run_action( runner: ExtensionRunnerInfo, action_name: str, @@ -113,7 +132,11 @@ async def run_action( stringified_result = command_result["result"] # currently result is always dumped to json even if response format is expected to # be a string. See docs of ER lsp server for more details. 
- raw_result = json.loads(stringified_result) + try: + raw_result = json.loads(stringified_result) + except json.JSONDecodeError as exception: + raise ActionRunFailed(f"Failed to decode result json: {exception}") from exception + if command_result["format"] == "string": result = raw_result elif ( @@ -221,6 +244,12 @@ async def notify_document_did_close( ), ) +async def notify_document_did_change(runner: ExtensionRunnerInfo, change_params: _internal_client_types.DidChangeTextDocumentParams) -> None: + runner.client.notify( + method=_internal_client_types.TEXT_DOCUMENT_DID_CHANGE, + params=change_params, + ) + __all__ = [ "ActionRunFailed", diff --git a/src/finecode/runner/runner_manager.py b/src/finecode/runner/runner_manager.py index 2701b64..7f3e4c1 100644 --- a/src/finecode/runner/runner_manager.py +++ b/src/finecode/runner/runner_manager.py @@ -4,6 +4,7 @@ import asyncio import collections.abc +import concurrent.futures import json import os import shutil @@ -15,20 +16,21 @@ from finecode import context, domain, finecode_cmd from finecode.config import collect_actions, config_models, read_configs from finecode.runner import ( - jsonrpc_client, runner_client, _internal_client_api, _internal_client_types, ) -from finecode.runner.jsonrpc_client import _io_thread +import finecode_jsonrpc as jsonrpc_client +from finecode_jsonrpc import _io_thread from finecode.utils import iterable_subscribe project_changed_callback: ( typing.Callable[[domain.Project], collections.abc.Coroutine[None, None, None]] | None ) = None -get_document: typing.Callable[[], collections.abc.Coroutine] | None = None +# get_document: typing.Callable[[], collections.abc.Coroutine] | None = None apply_workspace_edit: typing.Callable[[], collections.abc.Coroutine] | None = None +start_debug_session: typing.Callable[[int], collections.abc.Coroutine] | None = None partial_results: iterable_subscribe.IterableSubscribe = ( iterable_subscribe.IterableSubscribe() ) @@ -76,13 +78,13 @@ def 
map_change_object(change): async def _start_extension_runner_process( - runner: runner_client.ExtensionRunnerInfo, ws_context: context.WorkspaceContext + runner: runner_client.ExtensionRunnerInfo, ws_context: context.WorkspaceContext, debug: bool = False ) -> None: try: python_cmd = finecode_cmd.get_python_cmd( runner.working_dir_path, runner.env_name ) - except ValueError: + except ValueError as exception: try: runner.status = runner_client.RunnerStatus.NO_VENV await notify_project_changed( @@ -96,7 +98,7 @@ async def _start_extension_runner_process( raise jsonrpc_client.RunnerFailedToStart( f"Runner '{runner.readable_id}' failed to start" - ) + ) from exception if ws_context.runner_io_thread is None: logger.trace("Starting IO Thread") @@ -112,23 +114,64 @@ async def _start_extension_runner_process( runner.env_name ] runner_config = env_config.runner_config - # TODO: also check whether lsp server is available, without it doesn't make sense - # to start with debugger - if runner_config.debug: + + start_with_debug = debug or runner_config.debug + if start_with_debug: process_args.append("--debug") - # TODO: find free port and pass it - process_args.append("--debug-port=5681") + debug_port_future = concurrent.futures.Future() + else: + debug_port_future = None process_args_str: str = " ".join(process_args) - client = await jsonrpc_client.create_lsp_client_io( - f"{python_cmd} -m finecode_extension_runner.cli start {process_args_str}", - runner.working_dir_path, - message_types=_internal_client_types.METHOD_TO_TYPES, - io_thread=ws_context.runner_io_thread, - readable_id=runner.readable_id, - ) + client = jsonrpc_client.JsonRpcClient(message_types=_internal_client_types.METHOD_TO_TYPES, readable_id=runner.readable_id) + + try: + await client.start(server_cmd=f"{python_cmd} -m finecode_extension_runner.cli start {process_args_str}", working_dir_path=runner.working_dir_path, io_thread=ws_context.runner_io_thread, debug_port_future=debug_port_future, connect=not 
start_with_debug) + except jsonrpc_client.RunnerFailedToStart as exception: + logger.error(f"Runner {runner.readable_id} failed to start: {exception.message}") + runner.status = runner_client.RunnerStatus.FAILED + runner.initialized_event.set() + raise exception + runner.client = client - # TODO: recognize started debugger and send command to lsp server + + if start_with_debug: + assert debug_port_future is not None + + # avoid blocking main thread? + debug_async_future = asyncio.wrap_future(future=debug_port_future) + try: + await asyncio.wait_for(debug_async_future, timeout=30) + except TimeoutError as exception: + runner.status = runner_client.RunnerStatus.FAILED + runner.initialized_event.set() + raise RunnerFailedToStart(f"Failed to get debugger port in 30 seconds: {runner.readable_id}") from exception + + debug_port = debug_async_future.result() + logger.info(f"debug port: {debug_port}") + + if start_debug_session is not None: + debug_params = { + "name": "Python: WM", + "type": "debugpy", + "request": "attach", + "connect": { + "host": "localhost", + "port": debug_port + }, + "justMyCode": False, + # "logToFile": True, + } + await start_debug_session(debug_params) + + try: + await client.connect_to_server(io_thread=ws_context.runner_io_thread, timeout=None) + except Exception as exception: # TODO: analyze which can occur + # TODO: analyze whether server process will always stop if connection + logger.error(f"Runner {runner.readable_id} failed to connect to server: {exception}") + runner.status = runner_client.RunnerStatus.FAILED + runner.initialized_event.set() + raise RunnerFailedToStart(str(exception)) from exception async def on_exit(): logger.debug(f"Extension Runner {runner.readable_id} exited") @@ -140,20 +183,20 @@ async def on_exit(): runner.client.server_exit_callback = on_exit - if get_document is not None: - runner.client.feature( - _internal_client_types.DOCUMENT_GET, - get_document, - ) - runner.client.feature( 
_internal_client_types.WORKSPACE_APPLY_EDIT, _apply_workspace_edit ) - async def on_progress(params: _internal_client_types.ProgressParams): + async def on_progress(params: _internal_client_types.ProgressParams) -> None: logger.debug(f"Got progress from runner for token: {params.token}") + try: + result_value = json.loads(params.value) + except json.JSONDecodeError as exception: + logger.error(f"Failed to decode partial result value json: {exception}") + return + partial_result = domain.PartialResult( - token=params.token, value=json.loads(params.value) + token=params.token, value=result_value ) partial_results.publish(partial_result) @@ -169,8 +212,8 @@ async def get_project_raw_config( project_raw_config = ws_context.ws_projects_raw_configs[ project_def_path.parent ] - except KeyError: - raise ValueError(f"Config of project '{project_def_path_str}' not found") + except KeyError as exception: + raise ValueError(f"Config of project '{project_def_path_str}' not found") from exception return _internal_client_types.GetProjectRawConfigResult( config=json.dumps(project_raw_config) ) @@ -187,7 +230,7 @@ async def stop_extension_runner(runner: runner_client.ExtensionRunnerInfo) -> No try: await _internal_client_api.shutdown(client=runner.client) except Exception as e: - logger.error(f"Failed to shutdown:") + logger.error(f"Failed to shutdown {runner.readable_id}:") logger.exception(e) await _internal_client_api.exit(client=runner.client) @@ -224,16 +267,27 @@ async def start_runners_with_presets( project_status = project.status if project_status == domain.ProjectStatus.CONFIG_VALID: # first check whether runner doesn't exist yet to avoid duplicates - project_runners = ws_context.ws_projects_extension_runners.get(project.dir_path, {}) - project_dev_workspace_runner = project_runners.get('dev_workspace', None) + project_runners = ws_context.ws_projects_extension_runners.get( + project.dir_path, {} + ) + project_dev_workspace_runner = project_runners.get( + "dev_workspace", 
None + ) start_new_runner = True - if project_dev_workspace_runner is not None and project_dev_workspace_runner.status in [runner_client.RunnerStatus.INITIALIZING, runner_client.RunnerStatus.RUNNING]: + if ( + project_dev_workspace_runner is not None + and project_dev_workspace_runner.status + in [ + runner_client.RunnerStatus.INITIALIZING, + runner_client.RunnerStatus.RUNNING, + ] + ): # start a new one only if: # - either there is no runner yet # or venv exist(=exclude `runner_client.RunnerStatus.NO_VENV`) # and runner is not initializing or running already start_new_runner = False - + if start_new_runner: task = tg.create_task( _start_dev_workspace_runner( @@ -276,7 +330,7 @@ async def start_runners_with_presets( except config_models.ConfigurationError as exception: raise jsonrpc_client.RunnerFailedToStart( f"Reading project config with presets and collecting actions in {project.dir_path} failed: {exception.message}" - ) + ) from exception # update config of dev_workspace runner, the new config contains resolved presets dev_workspace_runner = ws_context.ws_projects_extension_runners[ @@ -310,7 +364,7 @@ async def get_or_start_runners_with_presets( async def start_runner( - project_def: domain.Project, env_name: str, ws_context: context.WorkspaceContext + project_def: domain.Project, env_name: str, ws_context: context.WorkspaceContext, debug: bool = False ) -> runner_client.ExtensionRunnerInfo: # this function manages status of the runner and initialized event runner = runner_client.ExtensionRunnerInfo( @@ -321,7 +375,7 @@ async def start_runner( client=None, ) save_runner_in_context(runner=runner, ws_context=ws_context) - await _start_extension_runner_process(runner=runner, ws_context=ws_context) + await _start_extension_runner_process(runner=runner, ws_context=ws_context, debug=debug) try: await _init_lsp_client(runner=runner, project=project_def) @@ -348,7 +402,7 @@ async def start_runner( await notify_project_changed(project_def) raise 
jsonrpc_client.RunnerFailedToStart( f"Found problem in configuration of {project_def.dir_path}: {exception.message}" - ) + ) from exception await update_runner_config(runner=runner, project=project_def) await _finish_runner_init(runner=runner, project=project_def, ws_context=ws_context) @@ -363,9 +417,8 @@ async def start_runner( async def get_or_start_runner( project_def: domain.Project, env_name: str, ws_context: context.WorkspaceContext ) -> runner_client.ExtensionRunnerInfo: - runners_by_env = ws_context.ws_projects_extension_runners[project_def.dir_path] - try: + runners_by_env = ws_context.ws_projects_extension_runners[project_def.dir_path] runner = runners_by_env[env_name] logger.trace(f"Runner {runner.readable_id} found") except KeyError: @@ -397,7 +450,7 @@ async def get_or_start_runner( async def _start_dev_workspace_runner( project_def: domain.Project, ws_context: context.WorkspaceContext ) -> runner_client.ExtensionRunnerInfo: - return await start_runner( + return await get_or_start_runner( project_def=project_def, env_name="dev_workspace", ws_context=ws_context ) @@ -411,20 +464,21 @@ async def _init_lsp_client( client_process_id=os.getpid(), client_name="FineCode_WorkspaceManager", client_version="0.1.0", + client_workspace_dir=runner.working_dir_path ) - except jsonrpc_client.BaseRunnerRequestException as error: + except jsonrpc_client.BaseRunnerRequestException as exception: raise jsonrpc_client.RunnerFailedToStart( - f"Runner failed to initialize: {error.message}" - ) + f"Runner failed to initialize: {exception.message}" + ) from exception try: await _internal_client_api.notify_initialized(runner.client) - except Exception as error: - logger.error(f"Failed to notify runner about initialization: {error}") - logger.exception(error) + except Exception as exception: + logger.error(f"Failed to notify runner about initialization: {exception}") + logger.exception(exception) raise jsonrpc_client.RunnerFailedToStart( - f"Runner failed to notify about 
initialization: {error}" - ) + f"Runner failed to notify about initialization: {exception}" + ) from exception logger.debug(f"LSP Client for initialized: {runner.readable_id}") @@ -444,7 +498,7 @@ async def update_runner_config( runner.initialized_event.set() raise jsonrpc_client.RunnerFailedToStart( f"Runner failed to update config: {exception.message}" - ) + ) from exception logger.debug(f"Updated config of runner {runner.readable_id}") @@ -454,13 +508,15 @@ async def _finish_runner_init( project: domain.Project, ws_context: context.WorkspaceContext, ) -> None: + # TODO: save per runner only during initialization. But where to get data from + # in case of runner restart? await send_opened_files( runner=runner, opened_files=list(ws_context.opened_documents.values()) ) def save_runners_from_tasks_in_context( - tasks: list[asyncio.Task], ws_context: context.WorkspaceContext + tasks: list[asyncio.Task[runner_client.ExtensionRunnerInfo]], ws_context: context.WorkspaceContext ) -> None: extension_runners: list[runner_client.ExtensionRunnerInfo] = [ runner.result() for runner in tasks if runner is not None @@ -555,7 +611,6 @@ def remove_runner_venv(runner_dir: Path, env_name: str) -> None: async def restart_extension_runners( runner_working_dir_path: Path, ws_context: context.WorkspaceContext ) -> None: - # TODO: reload config? try: runners_by_env = ws_context.ws_projects_extension_runners[ runner_working_dir_path @@ -564,12 +619,35 @@ async def restart_extension_runners( logger.error(f"Cannot find runner for {runner_working_dir_path}") return + # TODO: parallel? 
for runner in runners_by_env.values(): - await stop_extension_runner(runner) + await restart_extension_runner(runner_working_dir_path=runner.working_dir_path, env_name=runner.env_name, ws_context=ws_context) - project_def = ws_context.ws_projects[runner.working_dir_path] - await start_runner( - project_def=project_def, - env_name=runner.env_name, - ws_context=ws_context, - ) + +async def restart_extension_runner( + runner_working_dir_path: Path, env_name: str, ws_context: context.WorkspaceContext, debug: bool = False +) -> None: + # TODO: reload config? + try: + runners_by_env = ws_context.ws_projects_extension_runners[ + runner_working_dir_path + ] + except KeyError: + logger.error(f"Cannot find runner for {runner_working_dir_path}") + return + + try: + runner = runners_by_env[env_name] + except KeyError: + logger.error(f"Cannot find runner for env {env_name} in {runner_working_dir_path}") + return + + await stop_extension_runner(runner) + + project_def = ws_context.ws_projects[runner.working_dir_path] + await start_runner( + project_def=project_def, + env_name=runner.env_name, + ws_context=ws_context, + debug=debug + ) \ No newline at end of file diff --git a/src/finecode/services/run_service/__init__.py b/src/finecode/services/run_service/__init__.py index f6e94f0..8bcb611 100644 --- a/src/finecode/services/run_service/__init__.py +++ b/src/finecode/services/run_service/__init__.py @@ -9,4 +9,23 @@ start_required_environments, run_actions_in_projects, RunResultFormat, + RunActionTrigger, + DevEnv ) + + +__all__ = [ + "ActionRunFailed", + "StartingEnvironmentsFailed", + "run_action", + "find_action_project_and_run", + "find_action_project_and_run_with_partial_results", + "find_projects_with_actions", + "find_all_projects_with_action", + "run_with_partial_results", + "start_required_environments", + "run_actions_in_projects", + "RunResultFormat", + "RunActionTrigger", + "DevEnv", +] \ No newline at end of file diff --git 
a/src/finecode/services/run_service/payload_preprocessor.py b/src/finecode/services/run_service/payload_preprocessor.py index 9eeffef..b95e57e 100644 --- a/src/finecode/services/run_service/payload_preprocessor.py +++ b/src/finecode/services/run_service/payload_preprocessor.py @@ -1,7 +1,7 @@ import pathlib import typing -from finecode import context, project_analyzer +from finecode import context async def preprocess_for_project( @@ -12,15 +12,7 @@ async def preprocess_for_project( ) -> dict[str, typing.Any]: processed_payload = payload.copy() - # temporary hardcore logic until we get the proper payload structure and defaults - # from extension runner - if action_name == "lint" or action_name == "format": - if "file_paths" not in processed_payload: - processed_payload["file_paths"] = None - - if action_name == "format" and "save" not in processed_payload: - processed_payload["save"] = True - elif action_name == "prepare_envs" or action_name == "prepare_runners": + if action_name == "prepare_envs" or action_name == "prepare_runners": runtime_venv_path = project_dir_path / ".venvs" / "runtime" project_def_path = project_dir_path / "pyproject.toml" envs = [ @@ -51,10 +43,4 @@ async def preprocess_for_project( ) processed_payload["envs"] = envs - for param, value in processed_payload.items(): - if param == "file_paths" and value is None: - processed_payload["file_paths"] = await project_analyzer.get_project_files( - project_dir_path, ws_context=ws_context - ) - return processed_payload diff --git a/src/finecode/services/run_service/proxy_utils.py b/src/finecode/services/run_service/proxy_utils.py index 27786bd..ef99de3 100644 --- a/src/finecode/services/run_service/proxy_utils.py +++ b/src/finecode/services/run_service/proxy_utils.py @@ -40,11 +40,11 @@ async def find_action_project( if project_status != domain.ProjectStatus.CONFIG_VALID: logger.info( f"Extension runner {project_path} has no valid config with finecode, " - f"status: {project_status.name}" + + 
f"status: {project_status.name}" ) raise ActionRunFailed( f"Project {project_path} has no valid config with finecode," - f"status: {project_status.name}" + + f"status: {project_status.name}" ) return project_path @@ -54,6 +54,8 @@ async def find_action_project_and_run( file_path: pathlib.Path, action_name: str, params: dict[str, typing.Any], + run_trigger: runner_client.RunActionTrigger, + dev_env: runner_client.DevEnv, ws_context: context.WorkspaceContext, ) -> runner_client.RunActionResponse: project_path = await find_action_project( @@ -68,6 +70,8 @@ async def find_action_project_and_run( project_def=project, ws_context=ws_context, preprocess_payload=False, + run_trigger=run_trigger, + dev_env=dev_env, ) except ActionRunFailed as exception: raise exception @@ -85,9 +89,9 @@ async def run_action_in_runner( response = await runner_client.run_action( runner=runner, action_name=action_name, params=params, options=options ) - except runner_client.BaseRunnerRequestException as error: - logger.error(f"Error on running action {action_name}: {error.message}") - raise ActionRunFailed(error.message) + except runner_client.BaseRunnerRequestException as exception: + logger.error(f"Error on running action {action_name}: {exception.message}") + raise ActionRunFailed(exception.message) from exception return response @@ -142,13 +146,18 @@ async def run_action_and_notify( runner: runner_client.ExtensionRunnerInfo, result_list: AsyncList, partial_results_task: asyncio.Task, + run_trigger: runner_client.RunActionTrigger, + dev_env: runner_client.DevEnv, ) -> runner_client.RunActionResponse: try: return await run_action_in_runner( action_name=action_name, params=params, runner=runner, - options={"partial_result_token": partial_result_token}, + options={ + "partial_result_token": partial_result_token, + "meta": {"trigger": run_trigger.value, "dev_env": dev_env.value}, + }, ) finally: result_list.end() @@ -173,6 +182,8 @@ async def run_with_partial_results( params: dict[str, 
typing.Any], partial_result_token: int | str, project_dir_path: pathlib.Path, + run_trigger: runner_client.RunActionTrigger, + dev_env: runner_client.DevEnv, ws_context: context.WorkspaceContext, ) -> collections.abc.AsyncIterator[ collections.abc.AsyncIterable[domain.PartialResultRawValue] @@ -212,6 +223,8 @@ async def run_with_partial_results( runner=runner, result_list=result, partial_results_task=partial_results_task, + run_trigger=run_trigger, + dev_env=dev_env, ) ) @@ -237,6 +250,8 @@ async def find_action_project_and_run_with_partial_results( action_name: str, params: dict[str, typing.Any], partial_result_token: int | str, + run_trigger: runner_client.RunActionTrigger, + dev_env: runner_client.DevEnv, ws_context: context.WorkspaceContext, ) -> collections.abc.AsyncIterator[runner_client.RunActionRawResult]: logger.trace(f"Run {action_name} on {file_path}") @@ -248,6 +263,8 @@ async def find_action_project_and_run_with_partial_results( params=params, partial_result_token=partial_result_token, project_dir_path=project_path, + run_trigger=run_trigger, + dev_env=dev_env, ws_context=ws_context, ) @@ -343,8 +360,12 @@ async def _start_runner_or_update_config( runner_exist = env_name in existing_runners start_runner = True if runner_exist: + runner = existing_runners[env_name] + if runner.status == runner_client.RunnerStatus.INITIALIZING: + await runner.initialized_event.wait() + runner_is_running = ( - existing_runners[env_name].status == runner_client.RunnerStatus.RUNNING + runner.status == runner_client.RunnerStatus.RUNNING ) start_runner = not runner_is_running @@ -381,6 +402,8 @@ async def run_actions_in_running_project( ws_context: context.WorkspaceContext, concurrently: bool, result_format: RunResultFormat, + run_trigger: runner_client.RunActionTrigger, + dev_env: runner_client.DevEnv, ) -> dict[str, RunActionResponse]: result_by_action: dict[str, RunActionResponse] = {} @@ -395,6 +418,8 @@ async def run_actions_in_running_project( params=action_payload, 
project_def=project, ws_context=ws_context, + run_trigger=run_trigger, + dev_env=dev_env, result_format=result_format, ) ) @@ -420,18 +445,20 @@ async def run_actions_in_running_project( params=action_payload, project_def=project, ws_context=ws_context, + run_trigger=run_trigger, + dev_env=dev_env, result_format=result_format, ) except ActionRunFailed as exception: raise ActionRunFailed( f"Running of action {action_name} failed: {exception.message}" - ) + ) from exception except Exception as error: logger.error("Unexpected exception") logger.exception(error) raise ActionRunFailed( f"Running of action {action_name} failed with unexpected exception" - ) + ) from error result_by_action[action_name] = run_result @@ -444,6 +471,8 @@ async def run_actions_in_projects( ws_context: context.WorkspaceContext, concurrently: bool, result_format: RunResultFormat, + run_trigger: runner_client.RunActionTrigger, + dev_env: runner_client.DevEnv, ) -> dict[pathlib.Path, dict[str, RunActionResponse]]: project_handler_tasks: list[asyncio.Task] = [] try: @@ -458,6 +487,8 @@ async def run_actions_in_projects( ws_context=ws_context, concurrently=concurrently, result_format=result_format, + run_trigger=run_trigger, + dev_env=dev_env, ) ) project_handler_tasks.append(project_task) @@ -496,6 +527,8 @@ def find_projects_with_actions( RunResultFormat = runner_client.RunResultFormat RunActionResponse = runner_client.RunActionResponse +RunActionTrigger = runner_client.RunActionTrigger +DevEnv = runner_client.DevEnv async def run_action( @@ -503,7 +536,9 @@ async def run_action( params: dict[str, typing.Any], project_def: domain.Project, ws_context: context.WorkspaceContext, - result_format: RunResultFormat = RunResultFormat.JSON, + run_trigger: runner_client.RunActionTrigger, + dev_env: runner_client.DevEnv, + result_format: runner_client.RunResultFormat = RunResultFormat.JSON, preprocess_payload: bool = True, ) -> RunActionResponse: formatted_params = str(params) @@ -514,7 +549,7 @@ async def 
run_action( if project_def.status != domain.ProjectStatus.CONFIG_VALID: raise ActionRunFailed( f"Project {project_def.dir_path} has no valid configuration and finecode." - " Please check logs." + + " Please check logs." ) if preprocess_payload: @@ -551,6 +586,8 @@ async def run_action( env_name=env_name, project_def=project_def, ws_context=ws_context, + run_trigger=run_trigger, + dev_env=dev_env, result_format=result_format, ) else: @@ -568,6 +605,8 @@ async def run_action( env_name=handler.env, project_def=project_def, ws_context=ws_context, + run_trigger=run_trigger, + dev_env=dev_env, result_format=result_format, ) @@ -580,7 +619,9 @@ async def _run_action_in_env_runner( env_name: str, project_def: domain.Project, ws_context: context.WorkspaceContext, - result_format: RunResultFormat = RunResultFormat.JSON, + run_trigger: runner_client.RunActionTrigger, + dev_env: runner_client.DevEnv, + result_format: runner_client.RunResultFormat = RunResultFormat.JSON, ): try: runner = await runner_manager.get_or_start_runner( @@ -589,14 +630,17 @@ async def _run_action_in_env_runner( except runner_manager.RunnerFailedToStart as exception: raise ActionRunFailed( f"Runner {env_name} in project {project_def.dir_path} failed: {exception.message}" - ) + ) from exception try: response = await runner_client.run_action( runner=runner, action_name=action_name, params=payload, - options={"result_format": result_format}, + options={ + "result_format": result_format, + "meta": {"trigger": run_trigger.value, "dev_env": dev_env.value}, + }, ) except runner_client.BaseRunnerRequestException as error: await user_messages.error(