init
This commit is contained in:
commit
847cab47f0
108
.gitignore
vendored
Normal file
108
.gitignore
vendored
Normal file
@ -0,0 +1,108 @@
|
||||
# Byte-compiled / optimized / DLL files
|
||||
__pycache__/
|
||||
*.py[cod]
|
||||
*$py.class
|
||||
|
||||
# C extensions
|
||||
*.so
|
||||
|
||||
# Distribution / packaging
|
||||
.Python
|
||||
build/
|
||||
develop-eggs/
|
||||
dist/
|
||||
downloads/
|
||||
eggs/
|
||||
.eggs/
|
||||
lib/
|
||||
lib64/
|
||||
parts/
|
||||
sdist/
|
||||
var/
|
||||
wheels/
|
||||
*.egg-info/
|
||||
.installed.cfg
|
||||
*.egg
|
||||
MANIFEST
|
||||
|
||||
# PyInstaller
|
||||
# Usually these files are written by a python script from a template
|
||||
# before PyInstaller builds the exe, so as to inject date/other infos into it.
|
||||
*.manifest
|
||||
*.spec
|
||||
|
||||
# Installer logs
|
||||
pip-log.txt
|
||||
pip-delete-this-directory.txt
|
||||
|
||||
# Unit test / coverage reports
|
||||
htmlcov/
|
||||
.tox/
|
||||
.coverage
|
||||
.coverage.*
|
||||
.cache
|
||||
nosetests.xml
|
||||
coverage.xml
|
||||
*.cover
|
||||
.hypothesis/
|
||||
.pytest_cache/
|
||||
|
||||
# Translations
|
||||
*.mo
|
||||
*.pot
|
||||
|
||||
# Django stuff:
|
||||
*.log
|
||||
local_settings.py
|
||||
db.sqlite3
|
||||
|
||||
# Flask stuff:
|
||||
instance/
|
||||
.webassets-cache
|
||||
|
||||
# Scrapy stuff:
|
||||
.scrapy
|
||||
|
||||
# Sphinx documentation
|
||||
docs/_build/
|
||||
|
||||
# PyBuilder
|
||||
target/
|
||||
|
||||
# Jupyter Notebook
|
||||
.ipynb_checkpoints
|
||||
|
||||
# pyenv
|
||||
.python-version
|
||||
|
||||
# celery beat schedule file
|
||||
celerybeat-schedule
|
||||
|
||||
# SageMath parsed files
|
||||
*.sage.py
|
||||
|
||||
# Environments
|
||||
.env
|
||||
.venv
|
||||
env/
|
||||
venv/
|
||||
ENV/
|
||||
env.bak/
|
||||
venv.bak/
|
||||
|
||||
# Spyder project settings
|
||||
.spyderproject
|
||||
.spyproject
|
||||
|
||||
# Rope project settings
|
||||
.ropeproject
|
||||
|
||||
# mkdocs documentation
|
||||
/site
|
||||
|
||||
# mypy
|
||||
.mypy_cache/
|
||||
|
||||
# add
|
||||
.idea/
|
||||
token.json
|
189
main.py
Normal file
189
main.py
Normal file
@ -0,0 +1,189 @@
|
||||
from textual import events
|
||||
from textual.app import App, ComposeResult
|
||||
from textual.widgets import Input, Log
|
||||
from textual.containers import Horizontal, Vertical, Widget
|
||||
from collections import deque
|
||||
import sys
|
||||
import asyncio
|
||||
import argparse
|
||||
import pikpakFs
|
||||
import logging
|
||||
import functools
|
||||
|
||||
class TextualLogHandler(logging.Handler):
    """A logging handler that forwards formatted records to a textual Log widget."""

    def __init__(self, log_widget: Log):
        super().__init__()
        # Widget that receives one line per emitted record.
        self.log_widget = log_widget

    def emit(self, record):
        """Format *record* and append it as a single line to the Log widget."""
        self.log_widget.write_line(self.format(record))
|
||||
|
||||
class HistoryInput(Input):
    """An Input widget with arrow-key command history.

    "up"/"down" browse previously submitted commands; any other key exits
    browsing mode. A companion Log widget renders the history list (newest
    first) with a "> " marker on the currently selected entry. While
    ``block_input`` is True (see wait_for) all key events are ignored.
    """

    def __init__(self, placeholder: str = "", max_history: int = 20, *args, **kwargs):
        super().__init__(placeholder=placeholder, *args, **kwargs)
        # While True, on_key ignores every event (input is "frozen").
        self.block_input = False
        self.history = deque(maxlen=max_history)  # submitted commands, oldest first
        self.history_view = list()  # lines currently rendered in history_log
        self.history_index = -1  # index into the reversed history; -1 = not browsing
        self.history_log = Log(auto_scroll=False)  # widget that displays the history

    def widget(self) -> Widget:
        # Compose the input field stacked above its history view.
        return Vertical(self, self.history_log)

    def reverseIdx(self, idx) -> int:
        # Convert between insertion order and display order (newest shown first).
        return len(self.history) - 1 - idx

    async def on_key(self, event: events.Key) -> None:
        if self.block_input:
            return
        if event.key == "up":
            if self.history_index == -1:
                # First "up" while not browsing: keep the cursor at the end
                # and just repaint the history view.
                self.cursor_position = len(self.value)
                await self.update_history_view()
                return
            self.history_index = max(0, self.history_index - 1)
        elif event.key == "down":
            self.history_index = min(len(self.history) - 1, self.history_index + 1)
        else:
            # Any other key exits browsing mode.
            self.history_index = -1
            await self.update_history_view()
            return

        if len(self.history) > 0 and self.history_index != -1:
            # Copy the selected history entry into the input field.
            self.value = self.history[self.reverseIdx(self.history_index)]
            self.cursor_position = len(self.value)
        await self.update_history_view()

    async def on_input_submitted(self, event: Input.Submitted) -> None:
        user_input = event.value.strip()
        if user_input:
            # Record the (non-empty) submission.
            self.history.append(user_input)
        # Reset browsing state and clear the field after every submission.
        self.history_index = -1
        self.value = ""
        await self.update_history_view()

    async def update_history_view(self):
        """Repaint history_log (newest first, "> " marks the selection) and
        scroll so the selected entry stays visible."""
        self.history_log.clear()
        self.history_view.clear()

        if self.history:
            for idx, item in enumerate(self.history):
                # Mark the entry the user is currently browsing to.
                prefix = "> " if self.reverseIdx(idx) == self.history_index else "  "
                self.history_view.append(f"{prefix}{item}")

            self.history_log.write_lines(reversed(self.history_view))

            scroll_height = self.history_log.scrollable_size.height
            scroll_start = self.history_log.scroll_offset.y
            current = self.history_index

            if current < scroll_start:
                # Selection is above the viewport: scroll up to it.
                scroll_idx = min(max(0, current), len(self.history) - 1)
                self.history_log.scroll_to(y = scroll_idx)
            elif current >= scroll_start + scroll_height - 1:
                # Selection is below the viewport: scroll down to it.
                self.history_log.scroll_to(y = current - scroll_height + 1)

        self.refresh()

    async def animate_ellipsis(self):
        """Show an animated "Waiting..." placeholder until the task is cancelled."""
        ellipsis = ""
        try:
            while True:
                # Cycle the ellipsis (at most 3 dots), then wrap around.
                if len(ellipsis) < 3:
                    ellipsis += "."
                else:
                    ellipsis = ""
                self.value = f"Waiting{ellipsis}"
                await asyncio.sleep(0.5)
        finally:
            # Always clear the placeholder, even on cancellation.
            self.value = ""
            pass

    async def wait_for(self, operation):
        """Disable the input, run the awaitable-returning *operation* with a
        waiting animation, then re-enable and refocus the input."""
        self.disabled = True
        self.block_input = True
        animation_task = asyncio.create_task(self.animate_ellipsis())
        await operation()
        animation_task.cancel()
        self.disabled = False
        self.block_input = False
        self.focus()
|
||||
|
||||
|
||||
class InputLoggerApp(App):
    """Two-pane TUI: command input + history on the left, log output on the right."""

    CSS = """
    .divider {
        width: 0.5%;
        height: 100%;
        background: #444444;
    }
    .log {
        width: 80%;
        height: 100%;
    }
    """

    def setup_logger(self) -> None:
        """Route the root logger both to app.log and to the on-screen Log widget."""
        formatStr = '%(asctime)s - %(name)s - %(levelname)s - %(message)s'

        logging.basicConfig(
            filename='app.log',
            filemode='a',
            format=formatStr
        )

        logHandler = TextualLogHandler(self.log_widget)

        # Set the log format.
        logHandler.setFormatter(logging.Formatter(formatStr))

        # Get the root logger and attach the custom handler.
        root_logger = logging.getLogger()
        root_logger.setLevel(logging.INFO)
        root_logger.addHandler(logHandler)

    def write_to_console(self, content) -> None:
        # Append one line to the on-screen log pane.
        self.log_widget.write_line(content)

    def compose(self) -> ComposeResult:
        """Build the layout: [input + history | divider | log]."""
        self.input_widget = HistoryInput(placeholder="Input Command...")
        self.log_widget = Log(classes="log", highlight=True)

        left_panel = self.input_widget.widget()
        right_panel = self.log_widget
        divider = Vertical(classes="divider")

        yield Horizontal(left_panel, divider, right_panel)

    def on_mount(self) -> None:
        self.setup_logger()
        # NOTE(review): username/password are empty and proxy is "" (not None),
        # so login relies entirely on the cached token.json and the empty-string
        # proxy is passed through to the client — confirm both are intended.
        self.fs = pikpakFs.VirtFs("", "", "", loginCachePath = "token.json")

    async def handle_command(self, command) -> None:
        """Run one command: built-ins (clear/exit/debug) or delegate to the
        virtual filesystem; exceptions are logged rather than raised."""
        try:
            if command == "clear":
                self.log_widget.clear()
            elif command == "exit":
                # SystemExit is a BaseException, so it is not swallowed by the
                # `except Exception` below.
                sys.exit(0)
            elif command == "debug":
                logger = logging.getLogger()
                logger.setLevel(logging.DEBUG)
                self.write_to_console("Done")
            else:
                self.write_to_console(await self.fs.HandlerCommand(command))
        except Exception as e:
            logging.exception(e)

    async def on_input_submitted(self, event: Input.Submitted) -> None:
        # Only react to submissions from our own input widget.
        if event.input is not self.input_widget:
            return

        user_input = event.value.strip()
        # Echo the command before executing it.
        self.write_to_console(f"> {user_input}")
        await self.input_widget.wait_for(functools.partial(self.handle_command, user_input))
|
||||
|
||||
if __name__ == "__main__":
    # Launch the TUI application.
    InputLoggerApp().run()
|
266
pikpakFs.py
Normal file
266
pikpakFs.py
Normal file
@ -0,0 +1,266 @@
|
||||
import httpx
|
||||
from hashlib import md5
|
||||
from pikpakapi import PikPakApi
|
||||
from typing import Dict
|
||||
from datetime import datetime
|
||||
import json
|
||||
import re
|
||||
import os
|
||||
import logging
|
||||
|
||||
class PathWalker():
    """Split a path string into its components ("spots") for traversal.

    A relative path (one that does not start with *sep*) gets a leading "."
    spot so callers can distinguish it from an absolute path. Empty segments
    produced by repeated or trailing separators are dropped. When *subDir*
    is given it is appended as a final component.
    """

    def __init__(self, pathStr: str, subDir: str = None, sep: str = "/"):
        self.pathSpots: list[str] = []
        pathStr = pathStr.strip()
        if not pathStr.startswith(sep):
            # Relative path: mark it with a "." anchor.
            self.pathSpots.append(".")
        # Fix: use identity comparison with None (PEP 8) and avoid building an
        # intermediate list just to extend.
        self.pathSpots.extend(spot.strip() for spot in pathStr.split(sep) if spot.strip() != "")
        if subDir is not None:
            self.pathSpots.append(subDir)

    def IsAbsolute(self) -> bool:
        """Return True when the walked path is absolute (no "." anchor)."""
        return len(self.pathSpots) == 0 or self.pathSpots[0] != "."
|
||||
|
||||
class VirtFsNode:
    """Base class for entries in the virtual filesystem tree."""

    def __init__(self, id: str, name: str, fatherId: str):
        self.fatherId = fatherId  # id of the parent directory node
        self.name = name          # display name of this entry
        self.id = id              # remote file/folder id
|
||||
|
||||
class DirNode(VirtFsNode):
    """A directory entry: tracks child ids and when it was last listed."""

    def __init__(self, id: str, name: str, fatherId: str, childrenId: list[str]):
        super().__init__(id, name, fatherId)
        # Ids of the entries contained in this directory.
        self.childrenId = childrenId
        # Timestamp of the last server listing; None means never fetched.
        self.lastUpdate: datetime = None
|
||||
|
||||
|
||||
class FileNode(VirtFsNode):
    """A regular-file entry of the virtual filesystem."""

    def __init__(self, id: str, name: str, fatherId: str):
        super().__init__(id, name, fatherId)
        # Timestamp of the last metadata refresh; None means never fetched.
        self.lastUpdate: datetime = None
|
||||
|
||||
class PikpakToken:
    """Credentials plus the OAuth token pair, cached between sessions as JSON."""

    def __init__(self, username, password, access_token, refresh_token, user_id):
        self.username = username
        self.password = password
        self.access_token = access_token
        self.refresh_token = refresh_token
        self.user_id = user_id

    def to_json(self):
        """Serialize every field to a JSON object string."""
        return json.dumps(self.__dict__)

    @classmethod
    def from_json(cls, json_str):
        """Rebuild a token from the JSON produced by to_json()."""
        return cls(**json.loads(json_str))
|
||||
|
||||
class VirtFs:
    """Virtual filesystem view over a PikPak account.

    Nodes are cached by remote id in ``self.nodes``; directory listings are
    fetched lazily. A current working directory is tracked so shell-like
    commands (login/ls/cd/cwd/geturl/offdown) can be dispatched by name via
    HandlerCommand().
    """

    def __CalcMd5(self, text: str):
        # Stable folder name for offline downloads, derived from the URL.
        return md5(text.encode()).hexdigest()

    def __init__(self, username: str, password: str, proxy: str = None, loginCachePath: str = None):
        """Create the PikPak client; optionally route through *proxy* and
        restore a cached login token from *loginCachePath*."""
        httpx_client_args = None
        if proxy is not None:
            httpx_client_args = {
                "proxy": proxy,
                "transport": httpx.AsyncHTTPTransport(retries=1),
            }

        self.client = PikPakApi(
            username = username,
            password = password,
            httpx_client_args=httpx_client_args)

        self.nodes : Dict[str, VirtFsNode] = {}
        self.loginCachePath = loginCachePath
        # Root has no remote id and no parent.
        self.root = DirNode(None, "", None, [])
        self.currentLocation = self.root
        self.__LoginFromCache()

    def __LoginFromCache(self):
        """Restore access/refresh tokens from the cache file, if it exists and
        was written for the same username/password."""
        if self.loginCachePath is None:
            return
        if not os.path.exists(self.loginCachePath):
            return
        with open(self.loginCachePath, 'r', encoding='utf-8') as file:
            content = file.read()
            token = PikpakToken.from_json(content)
            if self.client.username != token.username or self.client.password != token.password:
                logging.error("failed to load login info from cache, not match")
                return
            self.client.access_token = token.access_token
            self.client.refresh_token = token.refresh_token
            self.client.user_id = token.user_id
            self.client.encode_token()
            logging.info("successfully load login info from cache")

    def __DumpLoginInfo(self):
        """Persist current credentials and tokens to the cache file."""
        if self.loginCachePath is None:
            return
        with open(self.loginCachePath, 'w', encoding='utf-8') as file:
            token = PikpakToken(self.client.username, self.client.password, self.client.access_token, self.client.refresh_token, self.client.user_id)
            file.write(token.to_json())
            logging.info("successfully dump login info to cache")

    async def __RefreshAccessToken(self):
        # Refresh the OAuth access token and return the raw response as JSON.
        result = await self.client.refresh_access_token()
        return json.dumps(result, indent=4)

    async def __RefreshDirectory(self, dirNode : DirNode):
        """Re-list *dirNode* from the server and rebuild its children,
        reusing cached node objects by id where possible."""
        dirInfo = await self.client.file_list(parent_id = dirNode.id)
        nodes = dirInfo["files"]
        dirNode.childrenId.clear()

        for node in nodes:
            child : VirtFsNode = None
            id = node["id"]
            name = node["name"]

            if id in self.nodes:
                child = self.nodes[id]
            else:
                if node["kind"].endswith("folder"):
                    child = DirNode(id, name, dirNode.id, [])
                else:
                    child = FileNode(id, name, dirNode.id)
                self.nodes[id] = child

            # Names may change remotely; keep the cached node in sync.
            child.name = name
            dirNode.childrenId.append(id)

        dirNode.lastUpdate = datetime.now()

    async def __PathToNode(self, pathStr : str, subDir : str = None) -> VirtFsNode:
        """Resolve *pathStr* (optionally extended by *subDir*) to a node, or
        None when any component is missing. Handles "." and ".." and lists
        directories lazily on first visit."""
        pathWalker = PathWalker(pathStr, subDir)
        current : VirtFsNode = None
        if pathWalker.IsAbsolute():
            current = self.root
        else:
            current = self.currentLocation

        for spot in pathWalker.pathSpots:
            if current is None:
                break
            if spot == "..":
                # Parent of the root is the root itself.
                if current.fatherId is None:
                    current = self.root
                else:
                    current = self.nodes[current.fatherId]
                continue

            # Only directories can be traversed further.
            if not isinstance(current, DirNode):
                return None

            currentDir : DirNode = current
            if currentDir.lastUpdate is None:
                await self.__RefreshDirectory(currentDir)

            if spot == ".":
                continue
            else:
                current = None
                for childId in currentDir.childrenId:
                    node = self.nodes[childId]
                    if spot == node.name:
                        current = node
                        break

        return current

    async def __NodeToPath(self, node : VirtFsNode) -> str:
        """Build the absolute path string for *node* by walking up to the root."""
        spots : list[str] = [""]
        current = node
        while current.id is not None:
            spots.append(current.name)
            if current.fatherId is None:
                break
            current = self.nodes[current.fatherId]
        spots.append("")
        return "/".join(reversed(spots))

    async def login(self):
        """Log in with the configured credentials and cache the token."""
        result = await self.client.login()
        self.__DumpLoginInfo()
        logging.debug(json.dumps(result, indent=4))
        return "Login Success"

    async def ls(self, pathStr : str = "") -> str:
        """List the names inside the directory at *pathStr*."""
        node = await self.__PathToNode(pathStr)
        if node is None:
            return f"path not found: {pathStr}"
        if not isinstance(node, DirNode):
            return "path is not directory"
        dirNode : DirNode = node
        result = ["==== ls ===="]
        for childId in dirNode.childrenId:
            node = self.nodes[childId]
            result.append(node.name)
        return "\n".join(result)

    async def cd(self, pathStr : str = "") -> str:
        """Change the current working directory to *pathStr*."""
        node = await self.__PathToNode(pathStr)
        if node is None:
            return f"path not found: {pathStr}"
        if not isinstance(node, DirNode):
            return "path is not directory"
        dirNode : DirNode = node
        self.currentLocation = dirNode
        return ""

    async def cwd(self) -> str:
        """Return the absolute path of the current working directory."""
        path = await self.__NodeToPath(self.currentLocation)
        if path is None:
            return "cwd failed"
        return path

    async def geturl(self, pathStr : str) -> str:
        """Return the web download link for the file at *pathStr*."""
        node = await self.__PathToNode(pathStr)
        if node is None:
            return f"path not found: {pathStr}"
        if not isinstance(node, FileNode):
            return "path is not file"
        result = await self.client.get_download_url(node.id)
        logging.debug(json.dumps(result, indent=4))
        return result["web_content_link"]

    async def offdown(self, url : str, pathStr : str = "") -> str :
        """Start an offline (cloud) download of *url* into a subfolder of
        *pathStr* named by the md5 of the url; return the subfolder name."""
        node = await self.__PathToNode(pathStr)
        if node is None:
            return f"path not found: {pathStr}"
        elif not isinstance(node, DirNode):
            return "path is not directory"

        subFolderName = self.__CalcMd5(url)
        subNode = await self.__PathToNode(pathStr, subFolderName)
        if subNode is None:
            # Target subfolder does not exist yet: create it and re-resolve.
            result = await self.client.create_folder(subFolderName, node.id)
            logging.debug(json.dumps(result, indent=4))
            await self.__RefreshDirectory(node)
            subNode = await self.__PathToNode(pathStr, subFolderName)
        elif not isinstance(subNode, DirNode):
            return "path is not directory"

        # Re-check after the possible create/refresh above.
        if subNode is None:
            return f"path not found: {pathStr}"
        elif not isinstance(subNode, DirNode):
            return "path is not directory"

        result = await self.client.offline_download(url, subNode.id)
        logging.debug(json.dumps(result, indent=4))

        return subFolderName

    async def HandlerCommand(self, command):
        """Parse *command* (double-quoted arguments supported) and dispatch to
        the method of the same name, e.g. 'ls /foo' -> self.ls('/foo')."""
        result = re.findall(r'"(.*?)"|(\S+)', command)
        filtered_result = [item for sublist in result for item in sublist if item]
        # Robustness: an empty/whitespace-only command has nothing to dispatch.
        if not filtered_result:
            return "Unknown command: "

        command = filtered_result[0]
        args = filtered_result[1:]

        # BUG FIX: getattr() without a default raises AttributeError for an
        # unknown command, so the old `if method == None` branch was dead code.
        # Supply a default and also reject non-callable attributes.
        method = getattr(self, command, None)
        if method is None or not callable(method):
            return f"Unknown command: {command}"
        return await method(*args)
|
25
requirements.txt
Normal file
25
requirements.txt
Normal file
@ -0,0 +1,25 @@
|
||||
anyio==4.6.2.post1
|
||||
certifi==2024.8.30
|
||||
charset-normalizer==3.4.0
|
||||
DataRecorder==3.6.2
|
||||
DownloadKit==2.0.5
|
||||
et-xmlfile==1.1.0
|
||||
h11==0.14.0
|
||||
httpcore==1.0.6
|
||||
httpx==0.27.2
|
||||
idna==3.10
|
||||
linkify-it-py==2.0.3
|
||||
markdown-it-py==3.0.0
|
||||
mdit-py-plugins==0.4.2
|
||||
mdurl==0.1.2
|
||||
openpyxl==3.1.5
|
||||
PikPakAPI==0.1.10
|
||||
platformdirs==4.3.6
|
||||
Pygments==2.18.0
|
||||
requests==2.32.3
|
||||
rich==13.9.2
|
||||
sniffio==1.3.1
|
||||
textual==0.83.0
|
||||
typing_extensions==4.12.2
|
||||
uc-micro-py==1.0.3
|
||||
urllib3==2.2.3
|
Loading…
x
Reference in New Issue
Block a user