import threading
from http.server import HTTPServer
from pathlib import Path
from typing import Any
from typing import Dict
from typing import List
from typing import Optional
import ipywidgets as widgets
import jupytext
from IPython.display import display
from ipykernel.comm import Comm
from jsonrpcclient import request
from jupyter_ascending._environment import EXECUTE_HOST_URL
from jupyter_ascending.handlers import ServerMethods
from jupyter_ascending.handlers import generate_request_handler
from jupyter_ascending.json_requests import ExecuteRequest
from jupyter_ascending.json_requests import FocusCellRequest
from jupyter_ascending.json_requests import SyncRequest
from jupyter_ascending.logger import J_LOGGER
from jupyter_ascending.notebook.data_types import JupyterCell
from jupyter_ascending.notebook.data_types import NotebookContents
from jupyter_ascending.notebook.merge import OpCodeAction
from jupyter_ascending.notebook.merge import OpCodes
from jupyter_ascending.notebook.merge import opcode_merge_cell_contents
from jupyter_ascending.utils import find_free_port
COMM_NAME = "AUTO_SYNC::notebook"
_JupyterComm: Optional[Comm] = None
notebook_server_methods = ServerMethods("JupyterNotebook Start", "JupyterNotebook Close")
@J_LOGGER.catch
def start_notebook_server_in_thread(
notebook_name: str, server, file_watcher_enabled: bool = False, status_widget=None
):
"""
Args:
notebook_name: The name of the notebook you want to be syncing in this process.
        file_watcher_enabled: Watch the notebook file from this process instead of relying
            on your editor. If your editor already fires sync events (like a PyCharm file
            watcher), leave this disabled; the same HTTP requests are used either way.
            Currently unsupported (see below).
    """
notebook_path = Path(notebook_name).absolute()
if not status_widget:
status_widget = widgets.Text()
status_widget.style.description_width = "300px"
display(status_widget)
    if file_watcher_enabled:
        # Fail loudly rather than with `assert False`, which `python -O` would strip.
        raise NotImplementedError("file_watcher_enabled is currently unsupported.")
from watchdog.observers import Observer
from jupyter_ascending.watchers.file_watcher import NotebookEventHandler
event_handler = NotebookEventHandler(str(notebook_path.absolute()), file_watcher_enabled)
file_observer = Observer()
abs_path = str(notebook_path.parent.absolute())
file_observer.schedule(event_handler, abs_path, recursive=False)
        file_watcher_thread = threading.Thread(target=file_observer.start)
file_watcher_thread.start()
# TODO: This might be a race condition if a bunch of these started at once...
notebook_server_port = find_free_port()
    notebook_executor = HTTPServer(("localhost", notebook_server_port), NotebookKernelRequestHandler)
    notebook_executor_thread = threading.Thread(target=notebook_executor.serve_forever)
    notebook_executor_thread.start()
J_LOGGER.info("IPYTHON: Registering notebook {}", notebook_path)
request(
EXECUTE_HOST_URL,
server.register_notebook_server.__name__,
# Params
notebook_path=str(notebook_path),
port_number=notebook_server_port,
)
J_LOGGER.info("==> Success")
make_comm()
return status_widget
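

# Hypothetical usage sketch (run from a notebook cell). The import path is an
# assumption based on this package's layout; `server` must expose
# `register_notebook_server`, as called above:
#
#     from jupyter_ascending.handlers import jupyter_server
#     start_notebook_server_in_thread("my_notebook.sync.ipynb", jupyter_server)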
def status_func(comm, open_msg):
@comm.on_msg
def _recv(msg):
print(msg)
J_LOGGER.warning(msg)
@notebook_server_methods.add
def handle_execute_request(data: dict) -> str:
    # Named `execute_request` to avoid shadowing jsonrpcclient's `request` import.
    execute_request = ExecuteRequest(**data)
    comm = get_comm()
    execute_cell_contents(comm, execute_request.cell_index)
    return f"Executing cell `{execute_request.cell_index}`"
@notebook_server_methods.add
def handle_sync_request(data: dict) -> str:
    sync_request = SyncRequest(**data)
    comm = get_comm()
    result = jupytext.reads(sync_request.contents, fmt="py:percent")
    update_cell_contents(comm, result)
    return "Syncing all cells"
@notebook_server_methods.add
def handle_focus_cell_request(data: dict) -> str:
    focus_request = FocusCellRequest(**data)
    J_LOGGER.debug(focus_request)
    raise NotImplementedError
@notebook_server_methods.add
def handle_get_status_request(data: dict) -> str:
J_LOGGER.info("Attempting get_status")
comm = get_comm()
comm.send({"command": "get_status"})
J_LOGGER.info("Sent get_status")
return f"Updating status"
NotebookKernelRequestHandler = generate_request_handler("NotebookKernel", notebook_server_methods)
def make_comm() -> Comm:
global _JupyterComm
J_LOGGER.info("IPYTHON: Registering Comms")
comm_target_name = COMM_NAME
jupyter_comm = Comm(target_name=comm_target_name)
def _get_command(msg) -> Optional[str]:
return msg["content"]["data"].get("command", None)
@jupyter_comm.on_msg
def _recv(msg):
if _get_command(msg) == "merge_notebooks":
J_LOGGER.info("GOT UPDATE STATUS")
merge_notebooks(jupyter_comm, msg["content"]["data"])
return
J_LOGGER.info("~~~~~~~~~~~~~~~~~~~~~~~~~~~~~")
J_LOGGER.info(msg)
J_LOGGER.info("~~~~~~~~~~~~~~~~~~~~~~~~~~~~~")
# store comm for access in this thread later
_JupyterComm = jupyter_comm
J_LOGGER.info("==> Success")
return _JupyterComm
def get_comm() -> Comm:
    # NOTE: an earlier approach cached the comm in the module-global `_JupyterComm`;
    # for now we simply create a fresh comm for every request.
    return make_comm()
def update_cell_contents(comm: Comm, result: Dict[str, Any]) -> None:
    def _transform_jupytext_cells(jupytext_cells) -> List[Dict[str, Any]]:
        # Re-index the cells and strip `outputs`/`metadata`, which the front end
        # does not need for a sync.
        return [
            {"index": i, "output": [], **{k: v for (k, v) in x.items() if k not in {"outputs", "metadata"}}}
            for i, x in enumerate(jupytext_cells)
        ]

    comm.send({"command": "start_sync_notebook", "cells": _transform_jupytext_cells(result["cells"])})
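

# For illustration: after the transform above, each cell sent to the front end
# looks roughly like (hypothetical source value):
#
#     {"index": 0, "output": [], "cell_type": "code", "source": "x = 1"}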
def get_output_text(javascript_cell) -> Optional[str]:
output_tuple = javascript_cell.get("outputs", tuple())
if not output_tuple:
return None
output = output_tuple[0]
if output.get("data", None):
data = output["data"]
if isinstance(data, dict):
if data.get("text/plain", None):
return data["text/plain"]
if output.get("text", None):
return output["text"]
return None
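

# For reference, the two standard nbformat output shapes handled above:
#
#     {"output_type": "execute_result", "data": {"text/plain": "4"}}  -> "4"
#     {"output_type": "stream", "name": "stdout", "text": "hello\n"}  -> "hello\n"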
@J_LOGGER.catch(reraise=True)
def merge_notebooks(comm: Comm, result: Dict[str, Any]) -> None:
javascript_cells = result["javascript_cells"]
current_notebook = NotebookContents(
cells=[
JupyterCell(
index=i,
cell_type=x["cell_type"],
source=x["source"],
output=get_output_text(x),
# metadata=x["metadata"],
)
for i, x in enumerate(javascript_cells)
]
)
new_notebook = NotebookContents(cells=[JupyterCell(**x) for x in result["new_notebook"]])
opcodes = opcode_merge_cell_contents(current_notebook, new_notebook)
J_LOGGER.info("Performing Opcodes...")
J_LOGGER.info(opcodes)
net_shift = 0
for op_action in opcodes:
net_shift = perform_op_code(comm, op_action, current_notebook, new_notebook, net_shift)
def perform_op_code(
comm: Comm,
op_action: OpCodeAction,
current_notebook: NotebookContents,
updated_notebook: NotebookContents,
net_shift: int,
) -> int:
"""
net_shift (int): Tracks the net shift of previous op codes since we can't apply all the operations at the same time to jupyter,
since it does not have that kind of editting model.
So what we do is make sure that as we delete and insert, we keep track of the shifts that have happened thus far.
Given this shift, we will shift the actions that we tell Jupyter notebook to do.
"""
if op_action.op_code == OpCodes.EQUAL:
pass
    elif op_action.op_code == OpCodes.DELETE:
        # NOTE: the file is truncated from this point in this copy; the rest of the
        # function is a hedged reconstruction. The (start, end) ranges on
        # `op_action.current`/`op_action.updated`, the INSERT/REPLACE members of
        # OpCodes, and the `op_code__*` comm commands are assumptions inferred from
        # the surrounding code, not verified against the released source.
        J_LOGGER.debug("Performing Delete")
        start, end = op_action.current
        live_index = start + net_shift  # position of the span in the live notebook
        for _ in range(start, end):
            # Each delete shifts later cells up by one, so we delete in place.
            comm.send({"command": "op_code__delete_cell", "cell_index": live_index})
        net_shift -= end - start
    elif op_action.op_code == OpCodes.INSERT:
        J_LOGGER.debug("Performing Insert")
        start, end = op_action.updated
        live_index = op_action.current[0] + net_shift
        for offset, cell in enumerate(updated_notebook.cells[start:end]):
            comm.send({"command": "op_code__insert_cell", "cell_index": live_index + offset, "cell_type": cell.cell_type, "source": cell.source})
        net_shift += end - start
    elif op_action.op_code == OpCodes.REPLACE:
        J_LOGGER.debug("Performing Replace")
        # Reconstructed as delete-old-span + insert-new-span, which keeps the index
        # bookkeeping consistent even when the two spans differ in length.
        cur_start, cur_end = op_action.current
        live_index = cur_start + net_shift
        for _ in range(cur_start, cur_end):
            comm.send({"command": "op_code__delete_cell", "cell_index": live_index})
        upd_start, upd_end = op_action.updated
        for offset, cell in enumerate(updated_notebook.cells[upd_start:upd_end]):
            comm.send({"command": "op_code__insert_cell", "cell_index": live_index + offset, "cell_type": cell.cell_type, "source": cell.source})
        net_shift += (upd_end - upd_start) - (cur_end - cur_start)
    return net_shift
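

def execute_cell_contents(comm: Comm, cell_index: int) -> None:
    # `handle_execute_request` above calls this, but its definition is missing from
    # this truncated copy. A minimal sketch, assuming the front-end extension
    # handles an "execute" comm command alongside the other commands sent here.
    comm.send({"command": "execute", "cell_index": cell_index})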