upload docker

ouczb 2023-07-30 09:37:30 +08:00
commit 6369d1af4f
39 changed files with 2512 additions and 0 deletions

.gitignore vendored Normal file

@ -0,0 +1,2 @@
.ipynb_checkpoints
/**/.ipynb_checkpoints

.gitmodules vendored Normal file

@ -0,0 +1,3 @@
[submodule "anki/anki-sync-server"]
path = anki/anki-sync-server
url = git@175.24.226.114:ouczbs/anki-sync-server.git

anki/Dockerfile Normal file

@ -0,0 +1,10 @@
FROM python:3.10-slim
WORKDIR /app/anki
COPY anki-env /app/anki
ENV XDG_CONFIG_HOME=/data/anki
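# ankisyncd resolves its config via XDG_CONFIG_HOME, i.e. /data/anki/ankisyncd/ankisyncd.conf (see the startup log in anki.ipynb)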
RUN cd /app/anki/shell/ && chmod 777 build.sh && chmod 777 start.sh
RUN /app/anki/shell/build.sh
COPY anki-sync-server /app/anki/anki-sync-server
CMD /app/anki/shell/start.sh
EXPOSE 27701
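# the sync server itself listens on 27001 (port in ankisyncd.conf); docker-compose maps host 27701 to container 27001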
#CMD tail -f /dev/null

anki/anki-env/conf/ankisyncd.conf Normal file

@ -0,0 +1,20 @@
[sync_app]
# change to 127.0.0.1 if you don't want the server to be accessible from the internet
host = 0.0.0.0
port = 27001
data_root = /data/anki/collections
base_url = /sync/
base_media_url = /msync/
auth_db_path = /data/anki/auth.db
# optional, for session persistence between restarts
session_db_path = /data/anki/session.db
# optional, for overriding the default managers and wrappers
# # must inherit from ankisyncd.full_sync.FullSyncManager, e.g.,
# full_sync_manager = great_stuff.postgres.PostgresFullSyncManager
# # must inherit from ankisyncd.session.SimpleSessionManager, e.g.,
# session_manager = great_stuff.postgres.PostgresSessionManager
# # must inherit from ankisyncd.users.SimpleUserManager, e.g.,
# user_manager = great_stuff.postgres.PostgresUserManager
# # must inherit from ankisyncd.collection.CollectionWrapper, e.g.,
# collection_wrapper = great_stuff.postgres.PostgresCollectionWrapper
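
A minimal sketch (not one of the committed files) of registering a sync user against the auth_db_path configured above, reusing the same ankisyncd helpers this commit calls in anki.ipynb; the username and password below are placeholders:

# sketch: register a user in auth.db via ankisyncd's user manager
from ankisyncd import config as config_provider
from ankisyncd.users import get_user_manager

config = config_provider.load_from_file()      # reads ankisyncd.conf
config_provider.load_from_env(config)          # optional env overrides, as in anki.ipynb
user_manager = get_user_manager(config)        # SqliteUserManager when auth_db_path is set
user_manager.add_user("anki", "change-me")     # placeholder credentials; stores a password hash in auth.db
print(user_manager.user_list())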

anki/anki-env/conf/data.txt Normal file

Binary file not shown.

anki/anki-env/conf/requirements.txt Normal file

@ -0,0 +1,24 @@
# THE FILE WAS GENERATED BY POETRY, DO NOT EDIT!
anki==2.1.49 ; python_version >= "3.8" and python_version < "4.0"
beautifulsoup4==4.12.2 ; python_version >= "3.8" and python_version < "4.0"
certifi==2023.5.7 ; python_version >= "3.8" and python_version < "4.0"
charset-normalizer==3.2.0 ; python_version >= "3.8" and python_version < "4.0"
decorator==4.4.2 ; python_version >= "3.8" and python_version < "4.0"
distro==1.8.0 ; python_version >= "3.8" and python_version < "4.0"
idna==3.4 ; python_version >= "3.8" and python_version < "4.0"
importlib-metadata==6.8.0 ; python_version >= "3.8" and python_version < "3.10"
markdown==3.3.7 ; python_version >= "3.8" and python_version < "4.0"
orjson==3.9.2 ; python_version >= "3.8" and python_version < "4.0" and platform_machine == "x86_64"
protobuf==3.20.2 ; python_version >= "3.8" and python_version < "4.0"
psutil==5.9.5 ; python_version >= "3.8" and python_version < "4.0"
pysocks==1.7.1 ; python_version >= "3.8" and python_version < "4.0"
requests==2.31.0 ; python_version >= "3.8" and python_version < "4.0"
requests[socks]==2.31.0 ; python_version >= "3.8" and python_version < "4.0"
send2trash==1.8.2 ; python_version >= "3.8" and python_version < "4.0"
soupsieve==2.4.1 ; python_version >= "3.8" and python_version < "4.0"
stringcase==1.2.0 ; python_version >= "3.8" and python_version < "4.0"
urllib3==2.0.3 ; python_version >= "3.8" and python_version < "4.0"
webob==1.8.7 ; python_version >= "3.8" and python_version < "4.0"
zipp==3.16.1 ; python_version >= "3.8" and python_version < "3.10"

anki/anki-env/shell/build.sh Normal file

@ -0,0 +1,3 @@
#pip install --upgrade pip
pip install -r ./conf/requirements.txt
pip install pyzstd

anki/anki-env/shell/start.sh Normal file

@ -0,0 +1,47 @@
py=`python --version`
echo $py
ankiconf="ankisyncd.conf"
ankidir="/data/anki/"
ankisyncd="ankisyncd/"
auth="auth.db"
collection="collections"
if [ ! -d $ankidir ];then
mkdir $ankidir
echo "create $ankidir"
else
echo "exists $ankidir"
fi
if [ ! -d $ankidir$ankisyncd ];then
mkdir $ankidir$ankisyncd
echo "create $ankidir$ankisyncd"
else
echo "exists $ankidir$ankisyncd"
fi
if [ -f $ankidir$ankiconf ]; then
echo "$ankiconf found"
else
echo "Creating new configuration file: $ankiconf."
cp ./conf/$ankiconf $ankidir$ankiconf
fi
if [ -f $ankidir$ankisyncd$ankiconf ]; then
echo "$ankisyncd$ankiconf found"
else
echo "ln -s ankisyncd"
ln -s $ankidir$ankiconf $ankidir$ankisyncd
fi
if [ -f $ankidir$auth ]; then
echo "$auth found"
else
echo "Creating new authentication database: $auth."
sqlite3 $ankidir$auth 'CREATE TABLE auth (username VARCHAR PRIMARY KEY, hash VARCHAR)'
fi
if [ -d $ankidir$collection ]; then
echo "$collection directory exists"
else
echo "Creating collections directory: $collection."
mkdir $ankidir$collection
fi
echo start running ankisyncd
cd ./anki-sync-server/src
python -m ankisyncd
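
start.sh creates auth.db with a single auth(username, hash) table. A small sketch (not part of the commit) that lists the registered users with Python's standard sqlite3 module; the path matches the script's $ankidir:

import sqlite3

# inspect the auth database that start.sh creates
con = sqlite3.connect("/data/anki/auth.db")
rows = con.execute("SELECT username FROM auth").fetchall()
print([name for (name,) in rows])
con.close()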

anki/anki.ipynb Normal file

@ -0,0 +1,341 @@
{
"cells": [
{
"cell_type": "code",
"execution_count": 1,
"id": "d875f67a",
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Looking in indexes: http://mirrors.tencentyun.com/pypi/simple\n",
"Ignoring importlib-metadata: markers 'python_version >= \"3.8\" and python_version < \"3.10\"' don't match your environment\n",
"Ignoring zipp: markers 'python_version >= \"3.8\" and python_version < \"3.10\"' don't match your environment\n",
"Collecting anki==2.1.49 (from -r ./conf/requirements.txt (line 4))\n",
" Downloading http://mirrors.tencentyun.com/pypi/packages/b8/e7/442dc5e500a95ddb91b1e6d90c510f799d672059cc904558a196f1923280/anki-2.1.49-cp38-abi3-manylinux2014_x86_64.whl (9.9 MB)\n",
"\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m9.9/9.9 MB\u001b[0m \u001b[31m5.0 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m00:01\u001b[0m00:01\u001b[0m0m\n",
"\u001b[?25hRequirement already satisfied: beautifulsoup4==4.12.2 in /root/docker/miniconda3/lib/python3.11/site-packages (from -r ./conf/requirements.txt (line 5)) (4.12.2)\n",
"Collecting certifi==2023.5.7 (from -r ./conf/requirements.txt (line 6))\n",
" Downloading http://mirrors.tencentyun.com/pypi/packages/9d/19/59961b522e6757f0c9097e4493fa906031b95b3ebe9360b2c3083561a6b4/certifi-2023.5.7-py3-none-any.whl (156 kB)\n",
"\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m157.0/157.0 kB\u001b[0m \u001b[31m361.0 kB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0ma \u001b[36m0:00:01\u001b[0m\n",
"\u001b[?25hCollecting charset-normalizer==3.2.0 (from -r ./conf/requirements.txt (line 7))\n",
" Downloading http://mirrors.tencentyun.com/pypi/packages/bc/85/ef25d4ba14c7653c3020a1c6e1a7413e6791ef36a0ac177efa605fc2c737/charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (199 kB)\n",
"\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m199.6/199.6 kB\u001b[0m \u001b[31m3.9 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0ma \u001b[36m0:00:01\u001b[0m\n",
"\u001b[?25hCollecting decorator==4.4.2 (from -r ./conf/requirements.txt (line 8))\n",
" Downloading http://mirrors.tencentyun.com/pypi/packages/ed/1b/72a1821152d07cf1d8b6fce298aeb06a7eb90f4d6d41acec9861e7cc6df0/decorator-4.4.2-py2.py3-none-any.whl (9.2 kB)\n",
"Collecting distro==1.8.0 (from -r ./conf/requirements.txt (line 9))\n",
" Downloading http://mirrors.tencentyun.com/pypi/packages/f4/2c/c90a3adaf0ddb70afe193f5ebfb539612af57cffe677c3126be533df3098/distro-1.8.0-py3-none-any.whl (20 kB)\n",
"Requirement already satisfied: idna==3.4 in /root/docker/miniconda3/lib/python3.11/site-packages (from -r ./conf/requirements.txt (line 10)) (3.4)\n",
"Collecting markdown==3.3.7 (from -r ./conf/requirements.txt (line 12))\n",
" Downloading http://mirrors.tencentyun.com/pypi/packages/f3/df/ca72f352e15b6f8ce32b74af029f1189abffb906f7c137501ffe69c98a65/Markdown-3.3.7-py3-none-any.whl (97 kB)\n",
"\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m97.8/97.8 kB\u001b[0m \u001b[31m397.4 kB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0ma \u001b[36m0:00:01\u001b[0m\n",
"\u001b[?25hCollecting orjson==3.9.2 (from -r ./conf/requirements.txt (line 13))\n",
" Downloading http://mirrors.tencentyun.com/pypi/packages/b4/7e/12240e7d30d3c8a0abb13fad9f016ac051594c2219d407af1fc7d462ead0/orjson-3.9.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (138 kB)\n",
"\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m138.7/138.7 kB\u001b[0m \u001b[31m488.6 kB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0ma \u001b[36m0:00:01\u001b[0m\n",
"\u001b[?25hCollecting protobuf==3.20.2 (from -r ./conf/requirements.txt (line 14))\n",
" Downloading http://mirrors.tencentyun.com/pypi/packages/8b/e6/2a47ce2eba1aaf287380a44270da897ada03d118a55c19595ec7b4f0831f/protobuf-3.20.2-py2.py3-none-any.whl (162 kB)\n",
"\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m162.1/162.1 kB\u001b[0m \u001b[31m4.9 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
"\u001b[?25hCollecting psutil==5.9.5 (from -r ./conf/requirements.txt (line 15))\n",
" Downloading http://mirrors.tencentyun.com/pypi/packages/af/4d/389441079ecef400e2551a3933224885a7bde6b8a4810091d628cdd75afe/psutil-5.9.5-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl (282 kB)\n",
"\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m282.1/282.1 kB\u001b[0m \u001b[31m5.4 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0ma \u001b[36m0:00:01\u001b[0m\n",
"\u001b[?25hRequirement already satisfied: pysocks==1.7.1 in /root/docker/miniconda3/lib/python3.11/site-packages (from -r ./conf/requirements.txt (line 16)) (1.7.1)\n",
"Collecting requests==2.31.0 (from -r ./conf/requirements.txt (line 17))\n",
" Downloading http://mirrors.tencentyun.com/pypi/packages/70/8e/0e2d847013cb52cd35b38c009bb167a1a26b2ce6cd6965bf26b47bc0bf44/requests-2.31.0-py3-none-any.whl (62 kB)\n",
"\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m62.6/62.6 kB\u001b[0m \u001b[31m423.2 kB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0ma \u001b[36m0:00:01\u001b[0m\n",
"\u001b[?25hCollecting send2trash==1.8.2 (from -r ./conf/requirements.txt (line 19))\n",
" Downloading http://mirrors.tencentyun.com/pypi/packages/a9/78/e4df1e080ed790acf3a704edf521006dd96b9841bd2e2a462c0d255e0565/Send2Trash-1.8.2-py3-none-any.whl (18 kB)\n",
"Collecting soupsieve==2.4.1 (from -r ./conf/requirements.txt (line 20))\n",
" Downloading http://mirrors.tencentyun.com/pypi/packages/49/37/673d6490efc51ec46d198c75903d99de59baffdd47aea3d071b80a9e4e89/soupsieve-2.4.1-py3-none-any.whl (36 kB)\n",
"Collecting stringcase==1.2.0 (from -r ./conf/requirements.txt (line 21))\n",
" Downloading http://mirrors.tencentyun.com/pypi/packages/f3/1f/1241aa3d66e8dc1612427b17885f5fcd9c9ee3079fc0d28e9a3aeeb36fa3/stringcase-1.2.0.tar.gz (3.0 kB)\n",
" Preparing metadata (setup.py) ... \u001b[?25ldone\n",
"\u001b[?25hCollecting urllib3==2.0.3 (from -r ./conf/requirements.txt (line 22))\n",
" Downloading http://mirrors.tencentyun.com/pypi/packages/8a/03/ad9306a50d05c166e3456fe810f33cee2b8b2a7a6818ec5d4908c4ec6b36/urllib3-2.0.3-py3-none-any.whl (123 kB)\n",
"\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m123.6/123.6 kB\u001b[0m \u001b[31m3.8 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
"\u001b[?25hCollecting webob==1.8.7 (from -r ./conf/requirements.txt (line 23))\n",
" Downloading http://mirrors.tencentyun.com/pypi/packages/62/9c/e94a9982e9f31fc35cf46cdc543a6c2c26cb7174635b5fd25b0bbc6a7bc0/WebOb-1.8.7-py2.py3-none-any.whl (114 kB)\n",
"\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m115.0/115.0 kB\u001b[0m \u001b[31m3.4 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
"\u001b[?25hBuilding wheels for collected packages: stringcase\n",
" Building wheel for stringcase (setup.py) ... \u001b[?25ldone\n",
"\u001b[?25h Created wheel for stringcase: filename=stringcase-1.2.0-py3-none-any.whl size=3569 sha256=d7e04c98d243b1bf594cbf2fd73bf04511a7406867ee7d3b695a9c6413b412fe\n",
" Stored in directory: /root/.cache/pip/wheels/4c/0c/49/074fffd81682caa8ae2d650567d819a43ee7f25eacce298fea\n",
"Successfully built stringcase\n",
"Installing collected packages: stringcase, webob, urllib3, soupsieve, send2trash, psutil, protobuf, orjson, markdown, distro, decorator, charset-normalizer, certifi, requests, anki\n",
" Attempting uninstall: urllib3\n",
" Found existing installation: urllib3 1.26.16\n",
" Uninstalling urllib3-1.26.16:\n",
" Successfully uninstalled urllib3-1.26.16\n",
" Attempting uninstall: soupsieve\n",
" Found existing installation: soupsieve 2.4\n",
" Uninstalling soupsieve-2.4:\n",
" Successfully uninstalled soupsieve-2.4\n",
" Attempting uninstall: send2trash\n",
" Found existing installation: Send2Trash 1.8.0\n",
" Uninstalling Send2Trash-1.8.0:\n",
" Successfully uninstalled Send2Trash-1.8.0\n",
" Attempting uninstall: psutil\n",
" Found existing installation: psutil 5.9.0\n",
" Uninstalling psutil-5.9.0:\n",
" Successfully uninstalled psutil-5.9.0\n",
" Attempting uninstall: decorator\n",
" Found existing installation: decorator 5.1.1\n",
" Uninstalling decorator-5.1.1:\n",
" Successfully uninstalled decorator-5.1.1\n",
" Attempting uninstall: charset-normalizer\n",
" Found existing installation: charset-normalizer 2.0.4\n",
" Uninstalling charset-normalizer-2.0.4:\n",
" Successfully uninstalled charset-normalizer-2.0.4\n",
" Attempting uninstall: certifi\n",
" Found existing installation: certifi 2023.7.22\n",
" Uninstalling certifi-2023.7.22:\n",
" Successfully uninstalled certifi-2023.7.22\n",
" Attempting uninstall: requests\n",
" Found existing installation: requests 2.29.0\n",
" Uninstalling requests-2.29.0:\n",
" Successfully uninstalled requests-2.29.0\n",
"Successfully installed anki-2.1.49 certifi-2023.5.7 charset-normalizer-3.2.0 decorator-4.4.2 distro-1.8.0 markdown-3.3.7 orjson-3.9.2 protobuf-3.20.2 psutil-5.9.5 requests-2.31.0 send2trash-1.8.2 soupsieve-2.4.1 stringcase-1.2.0 urllib3-2.0.3 webob-1.8.7\n",
"\u001b[33mWARNING: Running pip as the 'root' user can result in broken permissions and conflicting behaviour with the system package manager. It is recommended to use a virtual environment instead: https://pip.pypa.io/warnings/venv\u001b[0m\u001b[33m\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"\u001b[0mLooking in indexes: http://mirrors.tencentyun.com/pypi/simple\n",
"Collecting pyzstd\n",
" Downloading http://mirrors.tencentyun.com/pypi/packages/c3/7f/0d5c048cacb906fa40485b5bb9dc5de5993bff2e4c85fa77e5a89c413205/pyzstd-0.15.9-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (412 kB)\n",
"\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m412.3/412.3 kB\u001b[0m \u001b[31m6.1 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0ma \u001b[36m0:00:01\u001b[0m\n",
"\u001b[?25hInstalling collected packages: pyzstd\n",
"Successfully installed pyzstd-0.15.9\n",
"\u001b[33mWARNING: Running pip as the 'root' user can result in broken permissions and conflicting behaviour with the system package manager. It is recommended to use a virtual environment instead: https://pip.pypa.io/warnings/venv\u001b[0m\u001b[33m\n",
"\u001b[0m"
]
}
],
"source": [
"!cd ./anki-env/ && bash shell/build.sh\n",
"#!bash ./shell/start.sh"
]
},
{
"cell_type": "code",
"execution_count": 1,
"id": "c5c4b4ea",
"metadata": {},
"outputs": [],
"source": [
"import os\n",
"root = os.getcwd()\n",
"os.chdir(root + \"/anki-sync-server/src\")\n",
"#os.chdir(root)"
]
},
{
"cell_type": "code",
"execution_count": 2,
"id": "ab7f33f9",
"metadata": {},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
"[2023-07-29 02:37:39,550]:INFO:ankisyncd.config:Loaded config from /data/anki/ankisyncd/ankisyncd.conf\n",
"[2023-07-29 02:37:39,551]:INFO:ankisyncd.users:Found auth_db_path in config, using SqliteUserManager for auth\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"anki\n"
]
}
],
"source": [
"import sys\n",
"import getpass\n",
"from ankisyncd import config as config_provider\n",
"from ankisyncd.users import get_user_manager\n",
"config = config_provider.load_from_file()\n",
"config_provider.load_from_env(config)\n",
"username = \"anki\"\n",
"password = \"ouczbs\"\n",
"user_manager = get_user_manager(config)\n",
"# user_manager.add_user(username, password)\n",
"users = user_manager.user_list()\n",
"for username in users:\n",
" print(username)"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "c218d033",
"metadata": {
"scrolled": true
},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
"[2023-07-29 02:37:41,526]:INFO:ankisyncd:ankisyncd [unknown version] (https://github.com/ankicommunity/anki-sync-server.git)\n",
"[2023-07-29 02:37:41,527]:INFO:ankisyncd.config:Loaded config from /data/anki/ankisyncd/ankisyncd.conf\n",
"[2023-07-29 02:37:41,528]:INFO:ankisyncd.users:Found auth_db_path in config, using SqliteUserManager for auth\n",
"[2023-07-29 02:37:41,530]:INFO:ankisyncd.sessions:Found session_db_path in config, using SqliteSessionManager for auth\n",
"[2023-07-29 02:37:41,538]:INFO:ankisyncd.server:Serving HTTP on 0.0.0.0 port 27001...\n",
"[2023-07-29 02:38:09,209]:INFO:ankisyncd:>>>>>::request body or size: {'data': b'{\"v\":11,\"cv\":\"anki,2.1.64 (581f82c5),win:10\"}', 'v': 11, 'k': 'bc11cda8e476ff59fcb38b0472d761bf', 'c': '2.1.64,581f82c5,windows', 's': 'beb5Tq'}\n",
"[2023-07-29 02:38:09,219]:INFO:ankisyncd.CollectionThread[anki]:Starting...\n",
"[2023-07-29 02:38:09,222]:INFO:ankisyncd.CollectionThread[anki]:Running meta(*[], **{'v': 11, 'cv': 'anki,2.1.64 (581f82c5),win:10'})\n",
"[2023-07-29 02:38:09,269]:INFO:ankisyncd:<<<<<::response body or size: b'{\"mod\": 1690598178633, \"scm\": 1690470330406, \"usn\": 15, \"ts\": 1690598289, \"musn\": 0, \"uname\": \"anki\", \"msg\": \"\", \"cont\": true, \"hostNum\": 0}' compress size117\n",
"[2023-07-29 02:38:09,270]:INFO:ankisyncd.http:172.18.0.4 \"POST /sync/meta HTTP/1.0\" 200 117\n",
"[2023-07-29 02:38:09,272]:INFO:ankisyncd:>>>>>::request body or size: {'data': b'{\"v\":\"anki,2.1.64 (581f82c5),win:10\"}', 'v': 11, 'k': 'bc11cda8e476ff59fcb38b0472d761bf', 'c': '2.1.64,581f82c5,windows', 's': 'uauMP'}\n",
"[2023-07-29 02:38:09,273]:INFO:ankisyncd.CollectionThread[anki]:Running begin(*[], **{'skey': 'e0386f91'})\n",
"[2023-07-29 02:38:09,274]:INFO:ankisyncd:<<<<<::response body or size: b'{\"data\": {\"sk\": \"e0386f91\", \"usn\": 0}, \"err\": \"\"}' compress size58\n",
"[2023-07-29 02:38:09,274]:INFO:ankisyncd.http:172.18.0.4 \"POST /msync/begin HTTP/1.0\" 200 58\n",
"[2023-07-29 02:38:09,281]:INFO:ankisyncd:>>>>>::request body or size: {'data': b'{\"minUsn\":15,\"lnewer\":true,\"graves\":null}', 'v': 11, 'k': 'bc11cda8e476ff59fcb38b0472d761bf', 'c': '2.1.64,581f82c5,windows', 's': 'beb5Tq'}\n",
"[2023-07-29 02:38:09,282]:INFO:ankisyncd.CollectionThread[anki]:Running start(*[], **{'minUsn': 15, 'lnewer': True, 'graves': None})\n",
"[2023-07-29 02:38:09,285]:INFO:ankisyncd:<<<<<::response body or size: b'{\"cards\": [], \"notes\": [], \"decks\": []}' compress size45\n",
"[2023-07-29 02:38:09,286]:INFO:ankisyncd.http:172.18.0.4 \"POST /sync/start HTTP/1.0\" 200 45\n",
"[2023-07-29 02:38:09,296]:INFO:ankisyncd:>>>>>::request body or size: {'data': b'{\"changes\":{\"models\":[],\"decks\":[[],[]],\"tags\":[],\"conf\":{\"backups\":{\"daily\":12,\"minimum_interval_mins\":30,\"monthly\":9,\"weekly\":10},\"_deck_1_lastNotetype\":1690470330408,\"_nt_1690470330408_lastDeck\":1,\"lastUnburied\":2,\"newSpread\":0,\"nextPos\":4,\"defaultSearchText\":\"\",\"ignoreAccentsInSearch\":false,\"localOffset\":-480,\"pasteImagesAsPng\":false,\"pasteStripsFormatting\":false,\"rollover\":4,\"schedVer\":2,\"dueCounts\":true,\"curModel\":1690470330408,\"collapseTime\":1200,\"activeDecks\":[1690564414917],\"curDeck\":1690564414917,\"addToCur\":true,\"estTimes\":true,\"hideAudioPlayButtons\":false,\"newBury\":true,\"sortType\":\"noteFld\",\"sortBackwards\":false,\"timeLim\":0,\"dayLearnFirst\":false,\"interruptAudioWhenAnswering\":true},\"crt\":1690430400}}', 'v': 11, 'k': 'bc11cda8e476ff59fcb38b0472d761bf', 'c': '2.1.64,581f82c5,windows', 's': 'beb5Tq'}\n",
"[2023-07-29 02:38:09,297]:INFO:ankisyncd.CollectionThread[anki]:Running applyChanges(*[], **{'changes': {'models': [], 'decks': [[], []], 'tags': [], 'conf': {'backups': {'daily': 12, 'minimum_interval_mins': 30, 'monthly': 9, 'weekly': 10}, '_deck_1_lastNotetype': 1690470330408, '_nt_1690470330408_lastDeck': 1, 'lastUnburied': 2, 'newSpread': 0, 'nextPos': 4, 'defaultSearchText': '', 'ignoreAccentsInSearch': False, 'localOffset': -480, 'pasteImagesAsPng': False, 'pasteStripsFormatting': False, 'rollover': 4, 'schedVer': 2, 'dueCounts': True, 'curModel': 1690470330408, 'collapseTime': 1200, 'activeDecks': [1690564414917], 'curDeck': 1690564414917, 'addToCur': True, 'estTimes': True, 'hideAudioPlayButtons': False, 'newBury': True, 'sortType': 'noteFld', 'sortBackwards': False, 'timeLim': 0, 'dayLearnFirst': False, 'interruptAudioWhenAnswering': True}, 'crt': 1690430400}})\n",
"[2023-07-29 02:38:09,313]:INFO:ankisyncd:<<<<<::response body or size: b'{\"models\": [], \"decks\": [[], []], \"tags\": []}' compress size54\n",
"[2023-07-29 02:38:09,314]:INFO:ankisyncd.http:172.18.0.4 \"POST /sync/applyChanges HTTP/1.0\" 200 54\n",
"[2023-07-29 02:38:09,325]:INFO:ankisyncd:>>>>>::request body or size: {'data': b'{\"_pad\":null}', 'v': 11, 'k': 'bc11cda8e476ff59fcb38b0472d761bf', 'c': '2.1.64,581f82c5,windows', 's': 'beb5Tq'}\n",
"[2023-07-29 02:38:09,327]:INFO:ankisyncd.CollectionThread[anki]:Running chunk(*[], **{})\n",
"[2023-07-29 02:38:09,332]:INFO:ankisyncd:<<<<<::response body or size: b'{\"done\": true, \"notes\": [], \"cards\": [], \"revlog\": []}' compress size63\n",
"[2023-07-29 02:38:09,334]:INFO:ankisyncd.http:172.18.0.4 \"POST /sync/chunk HTTP/1.0\" 200 63\n",
"[2023-07-29 02:38:09,343]:INFO:ankisyncd:>>>>>::request body or size: {'data': b'{\"chunk\":{\"done\":true}}', 'v': 11, 'k': 'bc11cda8e476ff59fcb38b0472d761bf', 'c': '2.1.64,581f82c5,windows', 's': 'beb5Tq'}\n",
"[2023-07-29 02:38:09,344]:INFO:ankisyncd.CollectionThread[anki]:Running applyChunk(*[], **{'chunk': {'done': True}})\n",
"[2023-07-29 02:38:09,346]:INFO:ankisyncd:<<<<<::response body or size: b'null' compress size13\n",
"[2023-07-29 02:38:09,348]:INFO:ankisyncd.http:172.18.0.4 \"POST /sync/applyChunk HTTP/1.0\" 200 13\n",
"[2023-07-29 02:38:09,357]:INFO:ankisyncd:>>>>>::request body or size: {'data': b'{\"client\":[[0,0,0],48,48,1,6,6,2,1]}', 'v': 11, 'k': 'bc11cda8e476ff59fcb38b0472d761bf', 'c': '2.1.64,581f82c5,windows', 's': 'beb5Tq'}\n",
"[2023-07-29 02:38:09,358]:INFO:ankisyncd.CollectionThread[anki]:Running sanityCheck2(*[], **{'client': [[0, 0, 0], 48, 48, 1, 6, 6, 2, 1]})\n",
"[2023-07-29 02:38:09,361]:INFO:ankisyncd:<<<<<::response body or size: b'{\"status\": \"ok\"}' compress size25\n",
"[2023-07-29 02:38:09,362]:INFO:ankisyncd.http:172.18.0.4 \"POST /sync/sanityCheck2 HTTP/1.0\" 200 25\n",
"[2023-07-29 02:38:09,371]:INFO:ankisyncd:>>>>>::request body or size: {'data': b'{\"_pad\":null}', 'v': 11, 'k': 'bc11cda8e476ff59fcb38b0472d761bf', 'c': '2.1.64,581f82c5,windows', 's': 'beb5Tq'}\n",
"[2023-07-29 02:38:09,372]:INFO:ankisyncd.CollectionThread[anki]:Running finish(*[], **{})\n",
"[2023-07-29 02:38:09,377]:INFO:ankisyncd:<<<<<::response body or size: b'1690598289373' compress size22\n",
"[2023-07-29 02:38:09,378]:INFO:ankisyncd.http:172.18.0.4 \"POST /sync/finish HTTP/1.0\" 200 22\n",
"[2023-07-29 02:38:15,011]:INFO:ankisyncd:>>>>>::request body or size: {'data': b'{\"v\":\"anki,2.1.64 (581f82c5),win:10\"}', 'v': 11, 'k': 'bc11cda8e476ff59fcb38b0472d761bf', 'c': '2.1.64,581f82c5,windows', 's': 'dEnCxC'}\n",
"[2023-07-29 02:38:15,013]:INFO:ankisyncd.CollectionThread[anki]:Running begin(*[], **{'skey': 'e0386f91'})\n",
"[2023-07-29 02:38:15,014]:INFO:ankisyncd:<<<<<::response body or size: b'{\"data\": {\"sk\": \"e0386f91\", \"usn\": 0}, \"err\": \"\"}' compress size58\n",
"[2023-07-29 02:38:15,015]:INFO:ankisyncd.http:172.18.0.4 \"POST /msync/begin HTTP/1.0\" 200 58\n",
"[2023-07-29 02:38:15,016]:INFO:ankisyncd:>>>>>::request body or size: {'data': b'{\"v\":11,\"cv\":\"anki,2.1.64 (581f82c5),win:10\"}', 'v': 11, 'k': 'bc11cda8e476ff59fcb38b0472d761bf', 'c': '2.1.64,581f82c5,windows', 's': 'ekPDWV'}\n",
"[2023-07-29 02:38:15,039]:INFO:ankisyncd.CollectionThread[anki]:Running meta(*[], **{'v': 11, 'cv': 'anki,2.1.64 (581f82c5),win:10'})\n",
"[2023-07-29 02:38:15,041]:INFO:ankisyncd:<<<<<::response body or size: b'{\"mod\": 1690598289373, \"scm\": 1690470330406, \"usn\": 16, \"ts\": 1690598295, \"musn\": 0, \"uname\": \"anki\", \"msg\": \"\", \"cont\": true, \"hostNum\": 0}' compress size115\n",
"[2023-07-29 02:38:15,042]:INFO:ankisyncd.http:172.18.0.4 \"POST /sync/meta HTTP/1.0\" 200 115\n",
"[2023-07-29 02:39:03,690]:INFO:ankisyncd:>>>>>::request body or size: {'data': b'{\"v\":11,\"cv\":\"anki,2.1.64 (581f82c5),win:10\"}', 'v': 11, 'k': 'bc11cda8e476ff59fcb38b0472d761bf', 'c': '2.1.64,581f82c5,windows', 's': 'WB9Ha'}\n",
"[2023-07-29 02:39:03,700]:INFO:ankisyncd.CollectionThread[anki]:Running meta(*[], **{'v': 11, 'cv': 'anki,2.1.64 (581f82c5),win:10'})\n",
"[2023-07-29 02:39:03,702]:INFO:ankisyncd:<<<<<::response body or size: b'{\"mod\": 1690598289373, \"scm\": 1690470330406, \"usn\": 16, \"ts\": 1690598343, \"musn\": 0, \"uname\": \"anki\", \"msg\": \"\", \"cont\": true, \"hostNum\": 0}' compress size115\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"[2023-07-29 02:39:03,704]:INFO:ankisyncd.http:172.18.0.4 \"POST /sync/meta HTTP/1.0\" 200 115\n",
"[2023-07-29 02:39:03,706]:INFO:ankisyncd:>>>>>::request body or size: {'data': b'{\"v\":\"anki,2.1.64 (581f82c5),win:10\"}', 'v': 11, 'k': 'bc11cda8e476ff59fcb38b0472d761bf', 'c': '2.1.64,581f82c5,windows', 's': 'dneLt1'}\n",
"[2023-07-29 02:39:03,708]:INFO:ankisyncd.CollectionThread[anki]:Running begin(*[], **{'skey': 'e0386f91'})\n",
"[2023-07-29 02:39:03,708]:INFO:ankisyncd:<<<<<::response body or size: b'{\"data\": {\"sk\": \"e0386f91\", \"usn\": 0}, \"err\": \"\"}' compress size58\n",
"[2023-07-29 02:39:03,709]:INFO:ankisyncd.http:172.18.0.4 \"POST /msync/begin HTTP/1.0\" 200 58\n",
"[2023-07-29 02:39:03,712]:INFO:ankisyncd:>>>>>::request body or size: {'data': b'{\"minUsn\":16,\"lnewer\":true,\"graves\":null}', 'v': 11, 'k': 'bc11cda8e476ff59fcb38b0472d761bf', 'c': '2.1.64,581f82c5,windows', 's': 'WB9Ha'}\n",
"[2023-07-29 02:39:03,713]:INFO:ankisyncd.CollectionThread[anki]:Running start(*[], **{'minUsn': 16, 'lnewer': True, 'graves': None})\n",
"[2023-07-29 02:39:03,714]:INFO:ankisyncd:<<<<<::response body or size: b'{\"cards\": [], \"notes\": [], \"decks\": []}' compress size45\n",
"[2023-07-29 02:39:03,715]:INFO:ankisyncd.http:172.18.0.4 \"POST /sync/start HTTP/1.0\" 200 45\n",
"[2023-07-29 02:39:03,724]:INFO:ankisyncd:>>>>>::request body or size: {'data': b'{\"changes\":{\"models\":[],\"decks\":[[{\"id\":1690564414917,\"mod\":1690598340,\"name\":\"\\xe8\\xb5\\x96\\xe4\\xb8\\x96\\xe9\\x9b\\x84\\xe9\\x9f\\xb3\\xe6\\xa0\\x87\",\"usn\":16,\"lrnToday\":[2,0],\"revToday\":[2,0],\"newToday\":[2,2],\"timeToday\":[2,18928],\"collapsed\":false,\"browserCollapsed\":false,\"desc\":\"Please see the <a href=\\'https://ankiweb.net/shared/info/745168042\\'>shared deck page</a> for more info.\",\"dyn\":0,\"conf\":1,\"extendNew\":0,\"extendRev\":0,\"reviewLimit\":null,\"newLimit\":null,\"reviewLimitToday\":null,\"newLimitToday\":null}],[]],\"tags\":[],\"conf\":{\"localOffset\":-480,\"collapseTime\":1200,\"pasteImagesAsPng\":false,\"sortBackwards\":false,\"newSpread\":0,\"lastUnburied\":2,\"rollover\":4,\"_nt_1690470330408_lastDeck\":1,\"timeLim\":0,\"newBury\":true,\"defaultSearchText\":\"\",\"backups\":{\"daily\":12,\"minimum_interval_mins\":30,\"monthly\":9,\"weekly\":10},\"curModel\":1690470330408,\"nextPos\":4,\"pasteStripsFormatting\":false,\"addToCur\":true,\"sortType\":\"noteFld\",\"curDeck\":1690564414917,\"dayLearnFirst\":false,\"hideAudioPlayButtons\":false,\"interruptAudioWhenAnswering\":true,\"ignoreAccentsInSearch\":false,\"schedVer\":2,\"_deck_1_lastNotetype\":1690470330408,\"activeDecks\":[1690564414917],\"dueCounts\":true,\"estTimes\":true},\"crt\":1690430400}}', 'v': 11, 'k': 'bc11cda8e476ff59fcb38b0472d761bf', 'c': '2.1.64,581f82c5,windows', 's': 'WB9Ha'}\n",
"[2023-07-29 02:39:03,725]:INFO:ankisyncd.CollectionThread[anki]:Running applyChanges(*[], **{'changes': {'models': [], 'decks': [[{'id': 1690564414917, 'mod': 1690598340, 'name': '赖世雄音标', 'usn': 16, 'lrnToday': [2, 0], 'revToday': [2, 0], 'newToday': [2, 2], 'timeToday': [2, 18928], 'collapsed': False, 'browserCollapsed': False, 'desc': \"Please see the <a href='https://ankiweb.net/shared/info/745168042'>shared deck page</a> for more info.\", 'dyn': 0, 'conf': 1, 'extendNew': 0, 'extendRev': 0, 'reviewLimit': None, 'newLimit': None, 'reviewLimitToday': None, 'newLimitToday': None}], []], 'tags': [], 'conf': {'localOffset': -480, 'collapseTime': 1200, 'pasteImagesAsPng': False, 'sortBackwards': False, 'newSpread': 0, 'lastUnburied': 2, 'rollover': 4, '_nt_1690470330408_lastDeck': 1, 'timeLim': 0, 'newBury': True, 'defaultSearchText': '', 'backups': {'daily': 12, 'minimum_interval_mins': 30, 'monthly': 9, 'weekly': 10}, 'curModel': 1690470330408, 'nextPos': 4, 'pasteStripsFormatting': False, 'addToCur': True, 'sortType': 'noteFld', 'curDeck': 1690564414917, 'dayLearnFirst': False, 'hideAudioPlayButtons': False, 'interruptAudioWhenAnswering': True, 'ignoreAccentsInSearch': False, 'schedVer': 2, '_deck_1_lastNotetype': 1690470330408, 'activeDecks': [1690564414917], 'dueCounts': True, 'estTimes': True}, 'crt': 1690430400}})\n",
"[2023-07-29 02:39:03,732]:INFO:ankisyncd:<<<<<::response body or size: b'{\"models\": [], \"decks\": [[], []], \"tags\": []}' compress size54\n",
"[2023-07-29 02:39:03,734]:INFO:ankisyncd.http:172.18.0.4 \"POST /sync/applyChanges HTTP/1.0\" 200 54\n",
"[2023-07-29 02:39:03,741]:INFO:ankisyncd:>>>>>::request body or size: {'data': b'{\"_pad\":null}', 'v': 11, 'k': 'bc11cda8e476ff59fcb38b0472d761bf', 'c': '2.1.64,581f82c5,windows', 's': 'WB9Ha'}\n",
"[2023-07-29 02:39:03,742]:INFO:ankisyncd.CollectionThread[anki]:Running chunk(*[], **{})\n",
"[2023-07-29 02:39:03,748]:INFO:ankisyncd:<<<<<::response body or size: b'{\"done\": true, \"notes\": [], \"cards\": [], \"revlog\": []}' compress size63\n",
"[2023-07-29 02:39:03,749]:INFO:ankisyncd.http:172.18.0.4 \"POST /sync/chunk HTTP/1.0\" 200 63\n",
"[2023-07-29 02:39:03,757]:INFO:ankisyncd:>>>>>::request body or size: {'data': b'{\"chunk\":{\"done\":true,\"revlog\":[[1690598340609,1535892745436,16,3,-600,-60,0,13610,0]],\"cards\":[[1535892745436,1535892745390,1690564414917,0,1690598340,16,1,1,1690598980,0,0,1,0,1001,0,0,0,\"{}\"]]}}', 'v': 11, 'k': 'bc11cda8e476ff59fcb38b0472d761bf', 'c': '2.1.64,581f82c5,windows', 's': 'WB9Ha'}\n",
"[2023-07-29 02:39:03,758]:INFO:ankisyncd.CollectionThread[anki]:Running applyChunk(*[], **{'chunk': {'done': True, 'revlog': [[1690598340609, 1535892745436, 16, 3, -600, -60, 0, 13610, 0]], 'cards': [[1535892745436, 1535892745390, 1690564414917, 0, 1690598340, 16, 1, 1, 1690598980, 0, 0, 1, 0, 1001, 0, 0, 0, '{}']]}})\n",
"[2023-07-29 02:39:03,762]:INFO:ankisyncd:<<<<<::response body or size: b'null' compress size13\n",
"[2023-07-29 02:39:03,763]:INFO:ankisyncd.http:172.18.0.4 \"POST /sync/applyChunk HTTP/1.0\" 200 13\n",
"[2023-07-29 02:39:03,771]:INFO:ankisyncd:>>>>>::request body or size: {'data': b'{\"client\":[[0,0,0],48,48,2,6,6,2,1]}', 'v': 11, 'k': 'bc11cda8e476ff59fcb38b0472d761bf', 'c': '2.1.64,581f82c5,windows', 's': 'WB9Ha'}\n",
"[2023-07-29 02:39:03,772]:INFO:ankisyncd.CollectionThread[anki]:Running sanityCheck2(*[], **{'client': [[0, 0, 0], 48, 48, 2, 6, 6, 2, 1]})\n",
"[2023-07-29 02:39:03,775]:INFO:ankisyncd:<<<<<::response body or size: b'{\"status\": \"ok\"}' compress size25\n",
"[2023-07-29 02:39:03,776]:INFO:ankisyncd.http:172.18.0.4 \"POST /sync/sanityCheck2 HTTP/1.0\" 200 25\n",
"[2023-07-29 02:39:03,784]:INFO:ankisyncd:>>>>>::request body or size: {'data': b'{\"_pad\":null}', 'v': 11, 'k': 'bc11cda8e476ff59fcb38b0472d761bf', 'c': '2.1.64,581f82c5,windows', 's': 'WB9Ha'}\n",
"[2023-07-29 02:39:03,785]:INFO:ankisyncd.CollectionThread[anki]:Running finish(*[], **{})\n",
"[2023-07-29 02:39:03,790]:INFO:ankisyncd:<<<<<::response body or size: b'1690598343786' compress size22\n",
"[2023-07-29 02:39:03,791]:INFO:ankisyncd.http:172.18.0.4 \"POST /sync/finish HTTP/1.0\" 200 22\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"cards [[1535892745436, 1535892745390, 1690564414917, 0, 1690598340, 16, 1, 1, 1690598980, 0, 0, 1, 0, 1001, 0, 0, 0, '{}']]\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"[2023-07-29 02:40:41,532]:INFO:ankisyncd.ThreadingCollectionManager:Monitor is closing collection on inactive CollectionThread[anki]\n",
"[2023-07-29 02:40:41,538]:INFO:ankisyncd.CollectionThread[anki]:Running _close(*[], **{})\n"
]
}
],
"source": [
"import os\n",
"import sys\n",
"\n",
"import ankisyncd\n",
"from ankisyncd.config import load_from_file\n",
"from ankisyncd.config import load_from_env\n",
"from ankisyncd import logging\n",
"from ankisyncd.sync_app import SyncApp\n",
"import importlib\n",
"importlib.reload(ankisyncd.sync_app)\n",
"from ankisyncd.server import run_server\n",
"\n",
"logger = logging.get_logger(\"ankisyncd\")\n",
"logger.info(\n",
" \"ankisyncd {} ({})\".format(ankisyncd._get_version(), ankisyncd._homepage)\n",
")\n",
"config = load_from_file()\n",
"load_from_env(config)\n",
"\n",
"ankiserver = SyncApp(config)\n",
"run_server(ankiserver, config[\"host\"], int(config[\"port\"]))"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "e970ec4f",
"metadata": {},
"outputs": [],
"source": []
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3 (ipykernel)",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.11.4"
}
},
"nbformat": 4,
"nbformat_minor": 5
}

anki/readme.ipynb Normal file

@ -0,0 +1,250 @@
{
"cells": [
{
"cell_type": "markdown",
"id": "46c5ef50",
"metadata": {},
"source": [
"# anki sync文档\n",
"## 数据包\n",
"### http\n",
"```python\n",
"same = {\n",
" 'SERVER_PROTOCOL': 'HTTP/1.0', 'REQUEST_METHOD': 'POST', 'PATH_INFO': '/msync/begin', 'QUERY_STRING': '', \n",
"}\n",
"```\n",
"### android 包\n",
"```python\n",
"env = { 'CONTENT_LENGTH': '449','CONTENT_TYPE': 'multipart/form-data; boundary=Anki-sync-boundary', 'HTTP_ACCEPT_ENCODING': 'gzip', 'HTTP_USER_AGENT': 'AnkiDroid-2.15.6'}\n",
"```\n",
"### pc包\n",
"```python\n",
"b'{\"v\":\"anki,2.1.64 (581f82c5),win:10\"}'\n",
"```\n",
"```python\n",
"env = { 'CONTENT_LENGTH': '46', 'CONTENT_TYPE': 'application/octet-stream', 'HTTP_ANKI_SYNC': '{\"v\":11,\"k\":\"a8b327a1e7ccbf704c4a60a23a8c4a57\",\"c\":\"2.1.64,581f82c5,windows\",\"s\":\"cJcynh\"}' }\n",
"```\n",
"\n",
"## PC数据\n",
"### 数据协议\n",
"- 输入\n",
" - path is ::/msync/begin body is::b'(\\xb5/\\xfd\\x00X)\\x01\\x00{\"v\":\"anki,2.1.64 (581f82c5),win:10\"}'\n",
" - path is ::/sync/meta body is::b'(\\xb5/\\xfd\\x00Xi\\x01\\x00{\"v\":11,\"cv\":\"anki,2.1.64 (581f82c5),win:10\"}'\n",
" - path is ::/sync/hostKey body is::b'(\\xb5/\\xfd\\x00X\\xc9\\x00\\x00{\"u\":\"anki\",\"p\":\"ouczbs\"}'\n",
"- 输出\n",
" - 不加密,不压缩\n",
"### 流式解析\n",
"- 在数据较大时,使用结果不太对\n",
"```python\n",
"import pyzstd\n",
"data = b'{\"v\":\"anki,2.1.64 (581f82c5),win:10\"}'\n",
"encoder = pyzstd.ZstdCompressor()\n",
"encoder.compress(data)\n",
"body = encoder.flush()\n",
"print(body)\n",
"decoder = pyzstd.ZstdDecompressor()\n",
"data = decoder.decompress(body)\n",
"print(data)\n",
"```\n",
"### 快速解析\n",
"```python\n",
"import pyzstd\n",
"data = b'(\\xb5/\\xfd\\x00X)\\x01\\x00{\"v\":\"anki,2.1.64 (581f82c5),win:10\"}'\n",
"# with open(\"conf/data.txt\",\"wb\")as f:\n",
"# f.write(data)\n",
"with open(\"data.txt\",\"rb\")as f:\n",
" data = f.read()\n",
"print(len(data))\n",
"body = pyzstd.compress(data)\n",
"print(len(body))\n",
"data = pyzstd.decompress(body)\n",
"print(len(data))\n",
"```\n",
"## Android 数据\n",
"### 数据协议\n",
"- 输入\n",
"```python\n",
"data = b'--Anki-sync-boundary\\r\\nContent-Disposition: form-data; name=\"k\"\\r\\n\\r\\n0361e82baf7e86bf1b5f4339382fccd4\\r\\n--Anki-sync-boundary\\r\\nContent-Disposition: form-data; name=\"s\"\\r\\n\\r\\na87d9d0a\\r\\n--Anki-sync-boundary\\r\\nContent-Disposition: form-data; name=\"c\"\\r\\n\\r\\n1\\r\\n--Anki-sync-boundary\\r\\nContent-Disposition: form-data; name=\"data\"; filename=\"data\"\\r\\nContent-Type: application/octet-stream\\r\\n\\r\\n\\x1f\\x8b\\x08\\x00\\x00\\x00\\x00\\x00\\x00\\x00e\\x91MN\\xc30\\x10\\x85\\xafb\\xcd\\x86\\x8d\\xd5$(M\\x9b\\xa8t\\xc3\\x16!\\x84\\xd8U]\\xb8\\xf1\\xa4\\xb1\\xea\\xd8\\x95\\xed6TUwH\\xdc\\xa0\\x1b\\x16p\\x07\\x84\\xc4\\x81\\x10\\xa2\\xb7\\xc0I\\xf9+\\xdd\\xcd|\\xf3\\xe4yo\\xbc\\x86\\xbcdj\\x8a\\x16\\xb25T\\x9a\\xa3\\xf4\\xd5hL\\x81c>k\\xca\\xd1\\x1a\\x04\\x87,J\\xd2\\xb0\\x9b\\xc4q\\x14\\xa7Q\\x8f6\\xca\\x1f\\xd6Mc\\n\\x8aU\\x08\\x19|\\xbcl\\xdf^\\xb7\\xbb\\x87\\xbb\\xdd\\xe3\\xf3\\xfb\\xd3=PXX\\xe5\\x95\\x11\\x05i\\xd4\\x8d\\xe6l\\xe5\\xdf\\x8ch\\xe87\\x18\\\\\\x1e\\x02\\x85\\xf5!p\\xa2\\xc2_\\x92\\xf4{\\xa9\\x87\\xb9\\x96\\x92\\xcd-z\\x03\\x05\\x93\\x16)L\\x8c\\xae-\\x9a\\xf3\\xa3\\x01G\\x9b{SW\\x12\\x99Eb\\x11\\x89+\\x91\\x0c\\x18)\\r\\x16g\\'\\xa5ss\\x9b\\x05\\x01S3Q\\xe3\\xa4\\xa3\\xd0\\x05\\xb6d\\x06y T\\xa1\\x83^\\xdc\\x8d\\x92~\\x18\\x9f\\x9e\\x0c\\xf7\\x984G!s6\\xc5A\\xc0\\x86\\xa4\\xd0\\x86T\\xda i\\xd4\\x1d\\x9f\\x95\\xaf|\\xd6\\xb0M&\\xb0\\xbe\\x10\\x95p_\\xf6\\xd5B\\xca6\\xe01\\xfc#\\xfe\\xaf\\xfb\\xees\\xad\\n\\x7fD\\nx\\xebP\\xf1K\\xac\\xdb5\\xfb\\xee\\x1a\\x97\\xbe\\xdb\\x8c\\xe9h\\xdc\\x9c\\x8cM\\xdb\\x0f\\xdcl>\\x019A\\x97\\x92\\xda\\x01\\x00\\x00\\r\\n--Anki-sync-boundary--\\r\\n'\n",
"\n",
"data = b'--Anki-sync-boundary\\r\\nContent-Disposition: form-data; name=\"k\"\\r\\n\\r\\nxQ0tcIOYlDyeJhZV\\r\\n--Anki-sync-boundary\\r\\nContent-Disposition: form-data; name=\"s\"\\r\\n\\r\\n8d6272d6\\r\\n--Anki-sync-boundary\\r\\nContent-Disposition: form-data; name=\"c\"\\r\\n\\r\\n1\\r\\n--Anki-sync-boundary\\r\\nContent-Disposition: form-data; name=\"data\"; filename=\"data\"\\r\\nContent-Type: application/octet-stream\\r\\n\\r\\n\\x1f\\x8b\\x08\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\xabV*S\\xb224\\xd0QJ\\x06\\xd2J\\x89y\\xd9\\x99)E\\xf9\\x99):Fz\\x86\\xa6zf:\\x89y`\\xae\\x95\\xa1\\x91\\x95\\xaf\\x91\\x81\\x81\\xb9\\x97a\\xb0\\xb3R-\\x00\\x16\\x80\\xd5\\x825\\x00\\x00\\x00\\r\\n--Anki-sync-boundary--\\r\\n'\n",
"```\n",
"- 输出\n",
" - 不加密zstd压缩\n",
"### 正则匹配\n",
"```python\n",
"import re\n",
"pattern = b'--Anki-sync-boundary\\r\\nContent-Disposition: form-data; name=\"(.*?)\"\\\n",
"(?:; filename=\".*?\"\\r\\nContent-Type: application/octet-stream)*\\r\\n\\r\\n(.*?|[\\s\\S]*?)\\r\\n(?:--Anki-sync-boundary--\\r\\n$)*'\n",
"res = re.findall(pattern,data)\n",
"print(res)\n",
"```\n",
"\n",
"## python\n",
"### api 版本号\n",
"`sync.py >> SYNC_VER = 11`\n",
"### 数据包\n",
"```python\n",
"#class Request\n",
"def parseAndroid(self, body, env):\n",
" env[\"APP_CLIENT_TYPE\"] = ClientType.Android\n",
" request_items_dict = {}\n",
" pattern = b'--Anki-sync-boundary\\r\\nContent-Disposition: form-data; name=\"(.*?)\"\\\n",
"(?:; filename=\".*?\"\\r\\nContent-Type: application/octet-stream)*\\r\\n\\r\\n(.*?|[\\s\\S]*?)\\r\\n(?:--Anki-sync-boundary--\\r\\n$)*'\n",
" res = re.findall(pattern , body)\n",
" for k in res:\n",
" if len(k) < 2:\n",
" logger.error(f\"error pattern match: {k}\")\n",
" continue\n",
" v = k[1] if k[0] == b\"data\" else k[1].decode()\n",
" request_items_dict[k[0].decode()] = v\n",
" if \"data\" in request_items_dict and \"c\" in request_items_dict \\\n",
" and int(request_items_dict[\"c\"]):\n",
" data = request_items_dict[\"data\"] \n",
" with gzip.GzipFile(mode=\"rb\", fileobj=io.BytesIO(data)) as gz:\n",
" data = gz.read()\n",
" request_items_dict[\"data\"] = data\n",
" if \"data\" not in request_items_dict:\n",
" #pdb.set_trace()\n",
" pass\n",
" return request_items_dict\n",
"def parsePC(self, body, env):\n",
" env[\"APP_CLIENT_TYPE\"] = ClientType.PC\n",
" request_items_dict = {}\n",
" body = pyzstd.decompress(body)\n",
" request_items_dict[\"data\"] = body\n",
" http_anki_sync = env.get(\"HTTP_ANKI_SYNC\", \"\")\n",
" if http_anki_sync != \"\":\n",
" anki_sync = json.loads(http_anki_sync)\n",
" for k in anki_sync.keys():\n",
" request_items_dict[k] = anki_sync[k]\n",
" return request_items_dict\n",
" \n",
"def parse(self):\n",
" if 'application/octet-stream' in env.get(\"CONTENT_TYPE\",\"\"):\n",
" request_items_dict = self.parsePC(body, env)\n",
" else:\n",
" request_items_dict = self.parseAndroid(body, env)\n",
"def wrap_body(self, body, env):\n",
" if \"APP_CLIENT_TYPE\" not in env:\n",
" return body\n",
" if env[\"APP_CLIENT_TYPE\"] == ClientType.PC:\n",
" return pyzstd.compress(body)\n",
" # if env[\"APP_CLIENT_TYPE\"] == ClientType.Android:\n",
" # return body\n",
" return body\n",
"```\n",
"\n",
"### 注解\n",
"```python\n",
"import types\n",
"from functools import wraps\n",
"class chunked(object):\n",
" \"\"\"decorator\"\"\"\n",
" def __init__(self, func):\n",
" wraps(func)(self)\n",
" print(\"__init__ chunked\",func)\n",
"\n",
" def __call__(self, *args, **kwargs):\n",
" clss = args[0]\n",
" environ = args[1]\n",
" start_response = args[2]\n",
" args = (\n",
" clss,\n",
" environ,\n",
" )\n",
" w = self.__wrapped__(*args, **kwargs)\n",
" print(\"__call__ chunked\",clss,environ,start_response,kwargs,w)\n",
" return \"4\"\n",
"\n",
" def __get__(self, instance, cls):\n",
" print(\"__get__ chunked\", instance , cls)\n",
" if instance is None:\n",
" return self\n",
" else:\n",
" return types.MethodType(self, instance)\n",
"class SyncApp:\n",
" def __init__(self, config):\n",
" print(\"__init__SyncApp\",config)\n",
"\n",
" @chunked\n",
" def __call__(self, req):\n",
" print(\"__call__SyncApp\",req)\n",
" return \"3\"\n",
"sync = SyncApp(\"config\")\n",
"sync(1,2)\n",
" ```\n",
"### webob\n",
"- app\n",
"- environ\n",
" - 环境变量\n",
" - 请求数据\n",
" - body env.get(\"wsgi.input\")\n",
" - header env.get(\"HTTP_ANKI_SYNC\", \"\")\n",
"- request\n",
"- response\n",
"\n",
"## rust\n",
"```rust\n",
"pub fn decode_zstd_body_stream_for_client<S, E>(data: S) -> impl Stream<Item = HttpResult<Bytes>>\n",
"where\n",
" S: Stream<Item = Result<Bytes, E>> + Unpin,\n",
" E: Display,\n",
"{\n",
" let response_total = resp\n",
" .headers()\n",
" .get(&ORIGINAL_SIZE)\n",
" .and_then(|v| v.to_str().ok())\n",
" .and_then(|v| v.parse::<u32>().ok())\n",
" .or_bad_request(\"missing original size\")?;\n",
" stream.map(move |res| match res {\n",
" Ok(bytes) => {\n",
" let mut inner = inner.lock().unwrap();\n",
" inner.last_activity = Instant::now();\n",
" if sending {\n",
" inner.bytes_sent += bytes.len() as u32;\n",
" } else {\n",
" inner.bytes_received += bytes.len() as u32;\n",
" }\n",
" Ok(bytes)\n",
" }\n",
" err => err.or_http_err(StatusCode::SEE_OTHER, \"stream failure\"),\n",
" })\n",
" \n",
"}\n",
"```\n",
"- and_then map match\n",
"- |v| ?\n",
"- where impl trait"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "0e63eb4d",
"metadata": {},
"outputs": [],
"source": []
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3 (ipykernel)",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.11.4"
}
},
"nbformat": 4,
"nbformat_minor": 5
}
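
readme.ipynb above documents the two client framings: the PC client sends a zstd-compressed body plus a JSON sync header, while AnkiDroid sends gzip'd multipart form data. A short sketch (not part of the commit) of the PC-side handling described in parsePC, using pyzstd and the HTTP_ANKI_SYNC value from the WSGI environ; the sample values are copied from the notebook:

import json
import pyzstd

# sample sync header and body taken from readme.ipynb
env = {"HTTP_ANKI_SYNC": '{"v":11,"k":"a8b327a1e7ccbf704c4a60a23a8c4a57","c":"2.1.64,581f82c5,windows","s":"cJcynh"}'}
body = pyzstd.compress(b'{"v":"anki,2.1.64 (581f82c5),win:10"}')  # what the PC client would send

request_items = {"data": pyzstd.decompress(body)}                 # one-shot decompress, as in parsePC
request_items.update(json.loads(env.get("HTTP_ANKI_SYNC", "{}")))
print(request_items)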

docker-compose.yml Normal file

@ -0,0 +1,87 @@
# yaml configuration
version: '3'
networks:
nginx:
external: false
services:
jupyter:
build: ./jupyter
image: ouczbs:jupyter
container_name: jupyter
restart: always
networks:
- nginx
ports:
- "8888:8888"
environment:
- XDG_CONFIG_HOME=/data/anki
volumes:
- /root/docker:/data/jupyter
- /data/anki:/data/anki
anki:
build: ./anki
image: ouczbs:anki
container_name: anki
restart: always
ports:
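      # host port 27701 maps to container port 27001, where ankisyncd listens (see ankisyncd.conf)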
- "27701:27001"
networks:
- nginx
volumes:
- /data/anki:/data/anki
- /root/docker/anki/anki-sync-server:/app/anki/anki-sync-server
nginx:
image: nginx
container_name: nginx
restart: always
networks:
- nginx
ports:
# - 443:443
# - 80:80
- 27443:27443
- 27001:27001
volumes:
- ./nginx/nginx.conf:/etc/nginx/nginx.conf
- ./nginx/conf.d:/etc/nginx/conf.d
- ./nginx/ssl:/etc/nginx/ssl
- ./nginx/html:/usr/share/nginx/html
#- ./nginx:/usr/share/nginx/html
# - /root/docker/nginx/logs:/var/log/nginx
gitea:
image: gitea/gitea:latest
container_name: gitea
environment:
- USER_UID=1000
- USER_GID=1000
- GITEA__database__DB_TYPE=mysql
- GITEA__database__HOST=db:3306
- GITEA__database__NAME=gitea
- GITEA__database__USER=ouczbs
- GITEA__database__PASSWD=ouczbs
restart: always
networks:
- nginx
volumes:
- /root/.ssh:/root/.ssh
- /data/gitea:/data
- /etc/timezone:/etc/timezone:ro
- /etc/localtime:/etc/localtime:ro
ports:
- "3000:3000"
- "222:22"
depends_on:
- db
db:
image: mysql:8
restart: always
container_name: mysql
environment:
- MYSQL_ROOT_PASSWORD=ouczbs
- MYSQL_USER=ouczbs
- MYSQL_PASSWORD=ouczbs
- MYSQL_DATABASE=gitea
networks:
- nginx
volumes:
- /data/mysql:/var/lib/mysql

gitea/gitea.ipynb Normal file

@ -0,0 +1,232 @@
{
"cells": [
{
"cell_type": "code",
"execution_count": 1,
"id": "98be7050",
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Failed to set locale, defaulting to C.UTF-8\n",
"CentOS Linux 8 - AppStream 9.8 B/s | 38 B 00:03 \n",
"Error: Failed to download metadata for repo 'appstream': Cannot prepare internal mirrorlist: No URLs in mirrorlist\n"
]
}
],
"source": [
"import paramiko\n",
"\n",
"\n",
"class SshClass:\n",
" \"\"\"\n",
" ssh连接对象\n",
" 本对象提供了密钥连接、密码连接、命令执行、关闭连接\n",
" \"\"\"\n",
" ip = ''\n",
" port = 22\n",
" username = ''\n",
" timeout = 0\n",
" ssh = None\n",
"\n",
" def __init__(self, ip, username, port=22, timeout=30):\n",
" \"\"\"\n",
" 初始化ssh对象\n",
" :param ip: str 主机IP\n",
" :param username: str 登录用户名\n",
" :param port: int ssh端口\n",
" :param timeout: int 连接超时\n",
" \"\"\"\n",
" self.ip = ip\n",
" self.username = username\n",
" self.port = port\n",
" self.timeout = timeout\n",
" ssh = paramiko.SSHClient()\n",
" ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())\n",
" self.ssh = ssh\n",
"\n",
" def conn_by_key(self, key):\n",
" \"\"\"\n",
" 密钥连接\n",
" :param key: str rsa密钥路径\n",
" :return: ssh连接对象\n",
" \"\"\"\n",
" rsa_key = paramiko.RSAKey.from_private_key_file(key)\n",
" self.ssh.connect(hostname=self.ip, port=self.port, username=self.username, pkey=rsa_key, timeout=self.timeout)\n",
" if self.ssh:\n",
" print(\"密钥连接成功.\")\n",
" else:\n",
" self.close()\n",
" raise Exception(\"密钥连接失败.\")\n",
"\n",
" def conn_by_pwd(self, pwd):\n",
" \"\"\"\n",
" 密码连接\n",
" :param pwd: str 登录密码\n",
" :return: ssh连接对象\n",
" \"\"\"\n",
" self.ssh.connect(hostname=self.ip, port=self.port, username=self.username, password=pwd)\n",
" if self.ssh:\n",
" print(\"密码连接成功.\")\n",
" else:\n",
" self.close()\n",
" raise Exception(\"密码连接失败.\")\n",
"\n",
" def exec_command(self, command):\n",
" \"\"\"\n",
" 命令控制\n",
" :param command: str 命令\n",
" :return: dict 命令执行的返回结果\n",
" \"\"\"\n",
" if command:\n",
" stdin, stdout, stderr = self.ssh.exec_command(command)\n",
" return {\n",
" \"stdin\": command,\n",
" \"stdout\": stdout.read(),\n",
" \"stderr\": stderr.read()\n",
" }\n",
" else:\n",
" self.close()\n",
" raise Exception(\"命令不能为空字符串.\")\n",
"\n",
" def close(self):\n",
" \"\"\"\n",
" 关闭当前连接\n",
" :return:\n",
" \"\"\"\n",
" if self.ssh:\n",
" self.ssh.close()\n",
" else:\n",
" raise Exception(\"ssh关闭失败当前对象并没有ssh连接.\")\n",
"\n",
"\n",
"if __name__ == '__main__':\n",
" SSH = SshClass(\"111.111.6.115\", \"root\", port=22)\n",
" SSH.conn_by_pwd(\"123456\")\n",
" print(SSH.exec_command(\"ls\"))\n"
]
},
{
"cell_type": "code",
"execution_count": 1,
"id": "dd7e564d",
"metadata": {},
"outputs": [],
"source": [
"#!pip install paramiko\n",
"import paramiko"
]
},
{
"cell_type": "code",
"execution_count": 2,
"id": "9c7053e9",
"metadata": {},
"outputs": [],
"source": [
"ssh = paramiko.SSHClient()\n",
"ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())"
]
},
{
"cell_type": "code",
"execution_count": 9,
"id": "7b0a0f37",
"metadata": {},
"outputs": [],
"source": [
"key = \"./ssh/id_rsa\"\n",
"rsa_key = paramiko.RSAKey.from_private_key_file(key)"
]
},
{
"cell_type": "code",
"execution_count": 10,
"id": "cb215350",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"PKey(alg=RSA, bits=3072, fp=SHA256:NR5wmeI1gzG4h8L+2wPchaM/8Zmliog2Pe5hNH8LW7E)"
]
},
"execution_count": 10,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"rsa_key \n",
"#SHA256:NR5wmeI1gzG4h8L+2wPchaM/8Zmliog2Pe5hNH8LW7E\n",
"#SHA256:NR5wmeI1gzG4h8L+2wPchaM/8Zmliog2Pe5hNH8LW7E"
]
},
{
"cell_type": "code",
"execution_count": 15,
"id": "17267b54",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"<bound method SSHClient._log of <paramiko.client.SSHClient object at 0x7fde96651ad0>>"
]
},
"execution_count": 15,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"ssh._log"
]
},
{
"cell_type": "code",
"execution_count": 25,
"id": "327bb122",
"metadata": {},
"outputs": [],
"source": [
"ip = \"175.24.226.114\"\n",
"port = 222\n",
"username = \"git\"\n",
"timeout = 30\n",
"ssh.connect(hostname=ip, port=port, username=username, pkey=rsa_key, timeout=timeout)"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "da988de0",
"metadata": {},
"outputs": [],
"source": []
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3 (ipykernel)",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.11.4"
}
},
"nbformat": 4,
"nbformat_minor": 5
}
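
gitea.ipynb stops right after ssh.connect. A follow-on sketch (not part of the commit) showing how a command would be run over that connection with paramiko's exec_command, mirroring SshClass.exec_command above; note that Gitea's git user normally only accepts git commands, so the "ls" here is purely illustrative:

import paramiko

# same connection parameters as the notebook cells above
ssh = paramiko.SSHClient()
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
rsa_key = paramiko.RSAKey.from_private_key_file("./ssh/id_rsa")
ssh.connect(hostname="175.24.226.114", port=222, username="git", pkey=rsa_key, timeout=30)

stdin, stdout, stderr = ssh.exec_command("ls")  # as SshClass.exec_command does
print(stdout.read().decode(), stderr.read().decode())
ssh.close()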

jupyter/Dockerfile Normal file

@ -0,0 +1,22 @@
FROM centos:8 as jupyter
USER root
ENV ROOT_DIR /root/docker
WORKDIR /data/jupyter
COPY --from=ouczbs:miniconda ${ROOT_DIR} ${ROOT_DIR}
RUN cat ${ROOT_DIR}/miniconda.bashrc >> /root/.bashrc && rm ${ROOT_DIR}/miniconda.bashrc &&\
source ~/.bashrc && mv ${ROOT_DIR}/tini /tini && chmod +x /tini
RUN source ~/.bashrc && conda config --add channels https://mirrors.tuna.tsinghua.edu.cn/anaconda/pkgs/free/ &&\
conda config --set show_channel_urls yes &&\
conda install jupyter notebook -y && jupyter notebook --generate-config
VOLUME [ "/data" ]
COPY conf/pip.conf /root/.pip/pip.conf
COPY conf/jupyter_notebook_config.py /root/.jupyter/jupyter_notebook_config.py
COPY shell/start.sh /usr/local/bin/start.sh
RUN chmod 777 /usr/local/bin/start.sh
EXPOSE 8888
CMD bash /usr/local/bin/start.sh
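# /tini runs as PID 1, forwarding signals and reaping zombies, and launches the CMD above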
ENTRYPOINT ["/tini", "--"]

jupyter/conf/jupyter_notebook_config.py Normal file

@ -0,0 +1,952 @@
# Configuration file for jupyter-notebook.
#------------------------------------------------------------------------------
# Application(SingletonConfigurable) configuration
#------------------------------------------------------------------------------
## This is an application.
## The date format used by logging formatters for %(asctime)s
#c.Application.log_datefmt = '%Y-%m-%d %H:%M:%S'
## The Logging format template
#c.Application.log_format = '[%(name)s]%(highlevel)s %(message)s'
## Set the log level by value or name.
#c.Application.log_level = 30
#------------------------------------------------------------------------------
# JupyterApp(Application) configuration
#------------------------------------------------------------------------------
## Base class for Jupyter applications
## Answer yes to any prompts.
#c.JupyterApp.answer_yes = False
## Full path of a config file.
#c.JupyterApp.config_file = ''
## Specify a config file to load.
#c.JupyterApp.config_file_name = ''
## Generate default config file.
#c.JupyterApp.generate_config = False
#------------------------------------------------------------------------------
# NotebookApp(JupyterApp) configuration
#------------------------------------------------------------------------------
## Set the Access-Control-Allow-Credentials: true header
#c.NotebookApp.allow_credentials = False
## Set the Access-Control-Allow-Origin header
#
# Use '*' to allow any origin to access your server.
#
# Takes precedence over allow_origin_pat.
#c.NotebookApp.allow_origin = ''
## Use a regular expression for the Access-Control-Allow-Origin header
#
# Requests from an origin matching the expression will get replies with:
#
# Access-Control-Allow-Origin: origin
#
# where `origin` is the origin of the request.
#
# Ignored if allow_origin is set.
#c.NotebookApp.allow_origin_pat = ''
## Allow password to be changed at login for the notebook server.
#
# While logging in with a token, the notebook server UI will give the
# opportunity to the user to enter a new password at the same time that will
# replace the token login mechanism.
#
# This can be set to false to prevent changing password from the UI/API.
#c.NotebookApp.allow_password_change = True
## Allow requests where the Host header doesn't point to a local server
#
# By default, requests get a 403 forbidden response if the 'Host' header shows
# that the browser thinks it's on a non-local domain. Setting this option to
# True disables this check.
#
# This protects against 'DNS rebinding' attacks, where a remote web server
# serves you a page and then changes its DNS to send later requests to a local
# IP, bypassing same-origin checks.
#
# Local IP addresses (such as 127.0.0.1 and ::1) are allowed as local, along
# with hostnames configured in local_hostnames.
#c.NotebookApp.allow_remote_access = False
## Whether to allow the notebook to be run from root user.
#c.NotebookApp.allow_root = False
## " Require authentication to access prometheus metrics.
#c.NotebookApp.authenticate_prometheus = True
## Reload the webapp when changes are made to any Python src files.
#c.NotebookApp.autoreload = False
## DEPRECATED, use base_url
#c.NotebookApp.base_project_url = '/'
## The base URL for the notebook server.
#
# Leading and trailing slashes can be omitted, and will automatically be added.
#c.NotebookApp.base_url = '/'
## Specify what command to use to invoke a web browser when opening the notebook.
# If not specified, the default browser will be determined by the `webbrowser`
# standard library module, which allows setting of the BROWSER environment
# variable to override it.
#c.NotebookApp.browser = ''
## The full path to an SSL/TLS certificate file.
#c.NotebookApp.certfile = ''
## The full path to a certificate authority certificate for SSL/TLS client authentication.
#c.NotebookApp.client_ca = ''
## The config manager class to use
#c.NotebookApp.config_manager_class = 'notebook.services.config.manager.ConfigManager'
## The notebook manager class to use.
#c.NotebookApp.contents_manager_class = 'notebook.services.contents.largefilemanager.LargeFileManager'
## Extra keyword arguments to pass to `set_secure_cookie`. See tornado's
# set_secure_cookie docs for details.
#c.NotebookApp.cookie_options = {}
## The random bytes used to secure cookies. By default this is a new random
# number every time you start the Notebook. Set it to a value in a config file
# to enable logins to persist across server sessions.
#
# Note: Cookie secrets should be kept private, do not share config files with
# cookie_secret stored in plaintext (you can read the value from a file).
#c.NotebookApp.cookie_secret = b''
## The file where the cookie secret is stored.
#c.NotebookApp.cookie_secret_file = ''
## Override URL shown to users.
#
# Replace actual URL, including protocol, address, port and base URL, with the
# given value when displaying URL to the users. Do not change the actual
# connection URL. If authentication token is enabled, the token is added to the
# custom URL automatically.
#
# This option is intended to be used when the URL to display to the user cannot
# be determined reliably by the Jupyter notebook server (proxified or
# containerized setups for example).
#c.NotebookApp.custom_display_url = ''
## The default URL to redirect to from `/`
#c.NotebookApp.default_url = '/tree'
## Disable cross-site-request-forgery protection
#
# Jupyter notebook 4.3.1 introduces protection from cross-site request
# forgeries, requiring API requests to either:
#
# - originate from pages served by this server (validated with XSRF cookie and
# token), or - authenticate with a token
#
# Some anonymous compute resources still desire the ability to run code,
# completely without authentication. These services can disable all
# authentication and security checks, with the full knowledge of what that
# implies.
#c.NotebookApp.disable_check_xsrf = False
## Whether to enable MathJax for typesetting math/TeX
#
# MathJax is the javascript library Jupyter uses to render math/LaTeX. It is
# very large, so you may want to disable it if you have a slow internet
# connection, or for offline use of the notebook.
#
# When disabled, equations etc. will appear as their untransformed TeX source.
#c.NotebookApp.enable_mathjax = True
## Extra paths to look for Javascript notebook extensions.
#c.NotebookApp.extra_nbextensions_path = []
## handlers that should be loaded at higher priority than the default services
#c.NotebookApp.extra_services = []
## Extra paths to search for serving static files.
#
# This allows adding javascript/css to be available from the notebook server
# machine, or overriding individual files in the IPython
#c.NotebookApp.extra_static_paths = []
## Extra paths to search for Jinja templates.
#
# Can be used to override templates from notebook.templates.
#c.NotebookApp.extra_template_paths = []
##
#c.NotebookApp.file_to_run = ''
## Extra keyword arguments to pass to `get_secure_cookie`. See tornado's
# get_secure_cookie docs for details.
#c.NotebookApp.get_secure_cookie_kwargs = {}
## Deprecated: Use minified JS file or not, mainly use during dev to avoid JS
# recompilation
#c.NotebookApp.ignore_minified_js = False
## (bytes/sec) Maximum rate at which stream output can be sent on iopub before
# they are limited.
#c.NotebookApp.iopub_data_rate_limit = 1000000
## (msgs/sec) Maximum rate at which messages can be sent on iopub before they are
# limited.
#c.NotebookApp.iopub_msg_rate_limit = 1000
## The IP address the notebook server will listen on.
#c.NotebookApp.ip = 'localhost'
## Supply extra arguments that will be passed to the Jinja environment.
#c.NotebookApp.jinja_environment_options = {}
## Extra variables to supply to the Jinja templates when rendering.
#c.NotebookApp.jinja_template_vars = {}
## The kernel manager class to use.
#c.NotebookApp.kernel_manager_class = 'notebook.services.kernels.kernelmanager.MappingKernelManager'
## The kernel spec manager class to use. Should be a subclass of
# `jupyter_client.kernelspec.KernelSpecManager`.
#
# The Api of KernelSpecManager is provisional and might change without warning
# between this version of Jupyter and the next stable one.
#c.NotebookApp.kernel_spec_manager_class = 'jupyter_client.kernelspec.KernelSpecManager'
## The full path to a private key file for usage with SSL/TLS.
#c.NotebookApp.keyfile = ''
## Hostnames to allow as local when allow_remote_access is False.
#
# Local IP addresses (such as 127.0.0.1 and ::1) are automatically accepted as
# local as well.
#c.NotebookApp.local_hostnames = ['localhost']
## Set to True to enable JSON formatted logs. Run "pip install notebook[json-
# logging]" to install the required dependent packages. Can also be set using
# the environment variable JUPYTER_ENABLE_JSON_LOGGING=true.
#c.NotebookApp.log_json = False
## The login handler class to use.
#c.NotebookApp.login_handler_class = 'notebook.auth.login.LoginHandler'
## The logout handler class to use.
#c.NotebookApp.logout_handler_class = 'notebook.auth.logout.LogoutHandler'
## The MathJax.js configuration file that is to be used.
#c.NotebookApp.mathjax_config = 'TeX-AMS-MML_HTMLorMML-full,Safe'
## A custom url for MathJax.js. Should be in the form of a case-sensitive url to
# MathJax, for example: /static/components/MathJax/MathJax.js
#c.NotebookApp.mathjax_url = ''
## Sets the maximum allowed size of the client request body, specified in the
# Content-Length request header field. If the size in a request exceeds the
# configured value, a malformed HTTP message is returned to the client.
#
# Note: max_body_size is applied even in streaming mode.
#c.NotebookApp.max_body_size = 536870912
## Gets or sets the maximum amount of memory, in bytes, that is allocated for use
# by the buffer manager.
#c.NotebookApp.max_buffer_size = 536870912
## Gets or sets a lower bound on the open file handles process resource limit.
# This may need to be increased if you run into an OSError: [Errno 24] Too many
# open files. This is not applicable when running on Windows.
#c.NotebookApp.min_open_files_limit = 0
## Dict of Python modules to load as notebook server extensions. Entry values can
# be used to enable and disable the loading of the extensions. The extensions
# will be loaded in alphabetical order.
#c.NotebookApp.nbserver_extensions = {}
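# A minimal sketch of enabling one extension this way (assumption: the jupyterlab
# package is installed, so its server extension module is importable):
#   c.NotebookApp.nbserver_extensions = {'jupyterlab': True}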
## The directory to use for notebooks and kernels.
#c.NotebookApp.notebook_dir = ''
## Whether to open in a browser after starting. The specific browser used is
# platform dependent and determined by the python standard library `webbrowser`
# module, unless it is overridden using the --browser (NotebookApp.browser)
# configuration option.
#c.NotebookApp.open_browser = True
## Hashed password to use for web authentication.
#
# To generate, type in a python/IPython shell:
#
# from notebook.auth import passwd; passwd()
#
# The string should be of the form type:salt:hashed-password.
#c.NotebookApp.password = ''
## Forces users to use a password for the Notebook server. This is useful in a
# multi user environment, for instance when everybody in the LAN can access each
# other's machine through ssh.
#
# In such a case, serving the notebook server on localhost is not secure since
# any user can connect to the notebook server via ssh.
#c.NotebookApp.password_required = False
## The port the notebook server will listen on (env: JUPYTER_PORT).
#c.NotebookApp.port = 8888
## The number of additional ports to try if the specified port is not available
# (env: JUPYTER_PORT_RETRIES).
#c.NotebookApp.port_retries = 50
## DISABLED: use %pylab or %matplotlib in the notebook to enable matplotlib.
#c.NotebookApp.pylab = 'disabled'
## If True, display a button in the dashboard to quit (shutdown the notebook
# server).
#c.NotebookApp.quit_button = True
## (sec) Time window used to check the message and data rate limits.
#c.NotebookApp.rate_limit_window = 3
## Reraise exceptions encountered while loading server extensions?
#c.NotebookApp.reraise_server_extension_failures = False
## DEPRECATED, use the nbserver_extensions dict instead
#c.NotebookApp.server_extensions = []
## The session manager class to use.
#c.NotebookApp.session_manager_class = 'notebook.services.sessions.sessionmanager.SessionManager'
## Shut down the server after N seconds with no kernels or terminals running and
# no activity. This can be used together with culling idle kernels
# (MappingKernelManager.cull_idle_timeout) to shutdown the notebook server when
# it's not in use. This is not precisely timed: it may shut down up to a minute
# later. 0 (the default) disables this automatic shutdown.
#c.NotebookApp.shutdown_no_activity_timeout = 0
## The UNIX socket the notebook server will listen on.
#c.NotebookApp.sock = ''
## The permissions mode for UNIX socket creation (default: 0600).
#c.NotebookApp.sock_mode = '0600'
## Supply SSL options for the tornado HTTPServer. See the tornado docs for
# details.
#c.NotebookApp.ssl_options = {}
## Supply overrides for terminado. Currently only supports "shell_command". On
# Unix, if "shell_command" is not provided, a non-login shell is launched by
# default when the notebook server is connected to a terminal, a login shell
# otherwise.
#c.NotebookApp.terminado_settings = {}
## Set to False to disable terminals.
#
# This does *not* make the notebook server more secure by itself. Anything the
# user can do in a terminal, they can also do in a notebook.
#
# Terminals may also be automatically disabled if the terminado package is not
# available.
#c.NotebookApp.terminals_enabled = True
## Token used for authenticating first-time connections to the server.
#
# The token can be read from the file referenced by JUPYTER_TOKEN_FILE or set
# directly with the JUPYTER_TOKEN environment variable.
#
# When no password is enabled, the default is to generate a new, random token.
#
# Setting to an empty string disables authentication altogether, which is NOT
# RECOMMENDED.
#c.NotebookApp.token = '<generated>'
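# A hedged example of pinning the token instead of relying on a generated one
# ('change-me' is a placeholder, not a value used anywhere in this repo):
#   JUPYTER_TOKEN=change-me jupyter notebook
# or, equivalently, in this file:
#   c.NotebookApp.token = 'change-me'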
## Supply overrides for the tornado.web.Application that the Jupyter notebook uses.
#c.NotebookApp.tornado_settings = {}
## Whether to trust or not X-Scheme/X-Forwarded-Proto and X-Real-Ip/X-Forwarded-
# For headers sent by the upstream reverse proxy. Necessary if the proxy handles
# SSL
#c.NotebookApp.trust_xheaders = False
## Disable launching browser by redirect file
#
# For versions of notebook > 5.7.2, a security measure was added that prevented
# the authentication token used to launch the browser from being visible. This
# feature makes it difficult for other users on a multi-user system to run code
# in your Jupyter session as you.
#
# However, in some environments (like Windows Subsystem for Linux (WSL) and
# Chromebooks), launching a browser using a redirect file can lead to the
# browser failing to load. This is because of the difference in file
# structures/paths between the runtime and the browser.
#
# Setting this to False will disable this behavior, allowing the browser to
# launch by using a URL and visible token (as before).
#c.NotebookApp.use_redirect_file = True
## DEPRECATED, use tornado_settings
#c.NotebookApp.webapp_settings = {}
## Specify where to open the notebook on startup. This is the `new` argument
# passed to the standard library method `webbrowser.open`. The behaviour is not
# guaranteed, but depends on browser support. Valid values are:
#
# - 2 opens a new tab,
# - 1 opens a new window,
# - 0 opens in an existing window.
#
# See the `webbrowser.open` documentation for details.
#c.NotebookApp.webbrowser_open_new = 2
## Set the tornado compression options for websocket connections.
#
# This value will be returned from
# :meth:`WebSocketHandler.get_compression_options`. None (default) will disable
# compression. A dict (even an empty one) will enable compression.
#
# See the tornado docs for WebSocketHandler.get_compression_options for details.
#c.NotebookApp.websocket_compression_options = None
## The base URL for websockets, if it differs from the HTTP server (hint: it
# almost certainly doesn't).
#
# Should be in the form of an HTTP origin: ws[s]://hostname[:port]
#c.NotebookApp.websocket_url = ''
#------------------------------------------------------------------------------
# ConnectionFileMixin(LoggingConfigurable) configuration
#------------------------------------------------------------------------------
## Mixin for configurable classes that work with connection files
## JSON file in which to store connection info [default: kernel-<pid>.json]
#
# This file will contain the IP, ports, and authentication key needed to connect
# clients to this kernel. By default, this file will be created in the security
# dir of the current profile, but can be specified by absolute path.
#c.ConnectionFileMixin.connection_file = ''
## set the control (ROUTER) port [default: random]
#c.ConnectionFileMixin.control_port = 0
## set the heartbeat port [default: random]
#c.ConnectionFileMixin.hb_port = 0
## set the iopub (PUB) port [default: random]
#c.ConnectionFileMixin.iopub_port = 0
## Set the kernel's IP address [default localhost]. If the IP address is
# something other than localhost, then Consoles on other machines will be able
# to connect to the Kernel, so be careful!
#c.ConnectionFileMixin.ip = ''
## set the shell (ROUTER) port [default: random]
#c.ConnectionFileMixin.shell_port = 0
## set the stdin (ROUTER) port [default: random]
#c.ConnectionFileMixin.stdin_port = 0
##
#c.ConnectionFileMixin.transport = 'tcp'
#------------------------------------------------------------------------------
# KernelManager(ConnectionFileMixin) configuration
#------------------------------------------------------------------------------
## Manages a single kernel in a subprocess on this host.
#
# This version starts kernels with Popen.
## Should we autorestart the kernel if it dies.
#c.KernelManager.autorestart = True
## Time to wait for a kernel to terminate before killing it, in seconds. When a
# shutdown request is initiated, the kernel will be immediately sent an
# interrupt (SIGINT), followed by a shutdown_request message; after 1/2 of
# `shutdown_wait_time` it will be sent a terminate (SIGTERM) request, and finally
# at the end of `shutdown_wait_time` it will be killed (SIGKILL). terminate and
# kill may be equivalent on windows. Note that this value can be overridden by
# the in-use kernel provisioner, since shutdown times may vary by provisioned
# environment.
#c.KernelManager.shutdown_wait_time = 5.0
#------------------------------------------------------------------------------
# Session(Configurable) configuration
#------------------------------------------------------------------------------
## Object for handling serialization and sending of messages.
#
# The Session object handles building messages and sending them with ZMQ sockets
# or ZMQStream objects. Objects can communicate with each other over the
# network via Session objects, and only need to work with the dict-based IPython
# message spec. The Session will handle serialization/deserialization, security,
# and metadata.
#
# Sessions support configurable serialization via packer/unpacker traits, and
# signing with HMAC digests via the key/keyfile traits.
#
# Parameters
# ----------
#
# debug : bool
# whether to trigger extra debugging statements
# packer/unpacker : str : 'json', 'pickle' or import_string
# importstrings for methods to serialize message parts. If just
# 'json' or 'pickle', predefined JSON and pickle packers will be used.
# Otherwise, the entire importstring must be used.
#
# The functions must accept at least valid JSON input, and output *bytes*.
#
# For example, to use msgpack:
# packer = 'msgpack.packb', unpacker='msgpack.unpackb'
# pack/unpack : callables
# You can also set the pack/unpack callables for serialization directly.
# session : bytes
# the ID of this Session object. The default is to generate a new UUID.
# username : unicode
# username added to message headers. The default is to ask the OS.
# key : bytes
# The key used to initialize an HMAC signature. If unset, messages
# will not be signed or checked.
# keyfile : filepath
# The file containing a key. If this is set, `key` will be initialized
# to the contents of the file.
## Threshold (in bytes) beyond which an object's buffer should be extracted to
# avoid pickling.
#c.Session.buffer_threshold = 1024
## Whether to check PID to protect against calls after fork.
#
# This check can be disabled if fork-safety is handled elsewhere.
#c.Session.check_pid = True
## Threshold (in bytes) beyond which a buffer should be sent without copying.
#c.Session.copy_threshold = 65536
## Debug output in the Session
#c.Session.debug = False
## The maximum number of digests to remember.
#
# The digest history will be culled when it exceeds this value.
#c.Session.digest_history_size = 65536
## The maximum number of items for a container to be introspected for custom
# serialization. Containers larger than this are pickled outright.
#c.Session.item_threshold = 64
## execution key, for signing messages.
#c.Session.key = b''
## path to file containing execution key.
#c.Session.keyfile = ''
## Metadata dictionary, which serves as the default top-level metadata dict for
# each message.
#c.Session.metadata = {}
## The name of the packer for serializing messages. Should be one of 'json',
# 'pickle', or an import name for a custom callable serializer.
#c.Session.packer = 'json'
## The UUID identifying this session.
#c.Session.session = ''
## The digest scheme used to construct the message signatures. Must have the form
# 'hmac-HASH'.
#c.Session.signature_scheme = 'hmac-sha256'
## The name of the unpacker for unserializing messages. Only used with custom
# functions for `packer`.
#c.Session.unpacker = 'json'
## Username for the Session. Default is your system username.
#c.Session.username = 'root'
#------------------------------------------------------------------------------
# MultiKernelManager(LoggingConfigurable) configuration
#------------------------------------------------------------------------------
## A class for managing multiple kernels.
## The name of the default kernel to start
#c.MultiKernelManager.default_kernel_name = 'python3'
## The kernel manager class. This is configurable to allow subclassing of the
# KernelManager for customized behavior.
#c.MultiKernelManager.kernel_manager_class = 'jupyter_client.ioloop.IOLoopKernelManager'
## Share a single zmq.Context to talk to all my kernels
#c.MultiKernelManager.shared_context = True
#------------------------------------------------------------------------------
# MappingKernelManager(MultiKernelManager) configuration
#------------------------------------------------------------------------------
## A KernelManager that handles notebook mapping and HTTP error handling
## White list of allowed kernel message types. When the list is empty, all
# message types are allowed.
#c.MappingKernelManager.allowed_message_types = []
## Whether messages from kernels whose frontends have disconnected should be
# buffered in-memory. When True (default), messages are buffered and replayed on
# reconnect, avoiding lost messages due to interrupted connectivity. Disable if
# long-running kernels will produce too much output while no frontends are
# connected.
#c.MappingKernelManager.buffer_offline_messages = True
## Whether to consider culling kernels which are busy. Only effective if
# cull_idle_timeout > 0.
#c.MappingKernelManager.cull_busy = False
## Whether to consider culling kernels which have one or more connections. Only
# effective if cull_idle_timeout > 0.
#c.MappingKernelManager.cull_connected = False
## Timeout (in seconds) after which a kernel is considered idle and ready to be
# culled. Values of 0 or lower disable culling. Very short timeouts may result
# in kernels being culled for users with poor network connections.
#c.MappingKernelManager.cull_idle_timeout = 0
## The interval (in seconds) on which to check for idle kernels exceeding the
# cull timeout value.
#c.MappingKernelManager.cull_interval = 300
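# A sketch combining the culling options above (values are illustrative only):
#   c.MappingKernelManager.cull_idle_timeout = 3600   # cull kernels idle for an hour
#   c.MappingKernelManager.cull_interval = 300        # check every five minutes
#   c.MappingKernelManager.cull_connected = False     # spare kernels with open clients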
## Timeout for giving up on a kernel (in seconds). On starting and restarting
# kernels, we check whether the kernel is running and responsive by sending
# kernel_info_requests. This sets the timeout in seconds for how long the kernel
# can take before being presumed dead. This affects the MappingKernelManager
# (which handles kernel restarts) and the ZMQChannelsHandler (which handles the
# startup).
#c.MappingKernelManager.kernel_info_timeout = 60
##
#c.MappingKernelManager.root_dir = ''
#------------------------------------------------------------------------------
# KernelSpecManager(LoggingConfigurable) configuration
#------------------------------------------------------------------------------
## List of allowed kernel names.
#
# By default, all installed kernels are allowed.
#c.KernelSpecManager.allowed_kernelspecs = set()
## If there is no Python kernelspec registered and the IPython kernel is
# available, ensure it is added to the spec list.
#c.KernelSpecManager.ensure_native_kernel = True
## The kernel spec class. This is configurable to allow subclassing of the
# KernelSpecManager for customized behavior.
#c.KernelSpecManager.kernel_spec_class = 'jupyter_client.kernelspec.KernelSpec'
## Deprecated, use `KernelSpecManager.allowed_kernelspecs`
#c.KernelSpecManager.whitelist = set()
#------------------------------------------------------------------------------
# ContentsManager(LoggingConfigurable) configuration
#------------------------------------------------------------------------------
## Base class for serving files and directories.
#
# This serves any text or binary file, as well as directories, with special
# handling for JSON notebook documents.
#
# Most APIs take a path argument, which is always an API-style unicode path, and
# always refers to a directory.
#
# - unicode, not url-escaped
# - '/'-separated
# - leading and trailing '/' will be stripped
# - if unspecified, path defaults to '',
# indicating the root path.
## Allow access to hidden files
#c.ContentsManager.allow_hidden = False
##
#c.ContentsManager.checkpoints = None
##
#c.ContentsManager.checkpoints_class = 'notebook.services.contents.checkpoints.Checkpoints'
##
#c.ContentsManager.checkpoints_kwargs = {}
## handler class to use when serving raw file requests.
#
# Default is a fallback that talks to the ContentsManager API, which may be
# inefficient, especially for large files.
#
# Local files-based ContentsManagers can use a StaticFileHandler subclass, which
# will be much more efficient.
#
# Access to these files should be Authenticated.
#c.ContentsManager.files_handler_class = 'notebook.files.handlers.FilesHandler'
## Extra parameters to pass to files_handler_class.
#
# For example, StaticFileHandlers generally expect a `path` argument specifying
# the root directory from which to serve files.
#c.ContentsManager.files_handler_params = {}
## Glob patterns to hide in file and directory listings.
#c.ContentsManager.hide_globs = ['__pycache__', '*.pyc', '*.pyo', '.DS_Store', '*.so', '*.dylib', '*~']
## Python callable or importstring thereof
#
# To be called on a contents model prior to save.
#
# This can be used to process the structure, such as removing notebook outputs
# or other side effects that should not be saved.
#
# It will be called as (all arguments passed by keyword)::
#
# hook(path=path, model=model, contents_manager=self)
#
# - model: the model to be saved. Includes file contents.
# Modifying this dict will affect the file that is stored.
# - path: the API path of the save destination
# - contents_manager: this ContentsManager instance
#c.ContentsManager.pre_save_hook = None
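# A minimal sketch of such a hook (assumption: defined or imported inside this
# config file); it strips code-cell outputs from notebooks before they hit disk:
#   def scrub_output_pre_save(path, model, contents_manager, **kwargs):
#       if model['type'] != 'notebook' or model['content']['nbformat'] != 4:
#           return
#       for cell in model['content']['cells']:
#           if cell['cell_type'] == 'code':
#               cell['outputs'] = []
#               cell['execution_count'] = None
#   c.ContentsManager.pre_save_hook = scrub_output_pre_save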
##
#c.ContentsManager.root_dir = '/'
## The base name used when creating untitled directories.
#c.ContentsManager.untitled_directory = 'Untitled Folder'
## The base name used when creating untitled files.
#c.ContentsManager.untitled_file = 'untitled'
## The base name used when creating untitled notebooks.
#c.ContentsManager.untitled_notebook = 'Untitled'
#------------------------------------------------------------------------------
# FileManagerMixin(Configurable) configuration
#------------------------------------------------------------------------------
## Mixin for ContentsAPI classes that interact with the filesystem.
#
# Provides facilities for reading, writing, and copying both notebooks and
# generic files.
#
# Shared by FileContentsManager and FileCheckpoints.
#
# Note
# ----
# Classes using this mixin must provide the following attributes:
#
# root_dir : unicode
# A directory against which API-style paths are to be resolved.
#
# log : logging.Logger
## By default notebooks are saved to disk via a temporary file which, if
# successfully written, replaces the old one. This procedure, namely
# 'atomic_writing', causes some bugs on file systems without operation-order
# enforcement (like some networked filesystems). If set to False, the new
# notebook is written directly over the old one, which could fail (e.g. full
# filesystem or quota exceeded).
#c.FileManagerMixin.use_atomic_writing = True
#------------------------------------------------------------------------------
# FileContentsManager(FileManagerMixin,ContentsManager) configuration
#------------------------------------------------------------------------------
## If True (default), deleting files will send them to the platform's
# trash/recycle bin, where they can be recovered. If False, deleting files
# really deletes them.
#c.FileContentsManager.delete_to_trash = True
## Python callable or importstring thereof
#
# to be called on the path of a file just saved.
#
# This can be used to process the file on disk, such as converting the notebook
# to a script or HTML via nbconvert.
#
# It will be called as (all arguments passed by keyword)::
#
# hook(os_path=os_path, model=model, contents_manager=instance)
#
# - path: the filesystem path to the file just written
# - model: the model representing the file
# - contents_manager: this ContentsManager instance
#c.FileContentsManager.post_save_hook = None
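# A hedged sketch of such a hook (assumption: nbconvert is installed); it exports
# every saved notebook as a .py script alongside it:
#   import os
#   from subprocess import check_call
#   def convert_to_script_post_save(model, os_path, contents_manager, **kwargs):
#       if model['type'] != 'notebook':
#           return
#       d, fname = os.path.split(os_path)
#       check_call(['jupyter', 'nbconvert', '--to', 'script', fname], cwd=d)
#   c.FileContentsManager.post_save_hook = convert_to_script_post_save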
##
#c.FileContentsManager.root_dir = ''
## DEPRECATED, use post_save_hook. Will be removed in Notebook 5.0
#c.FileContentsManager.save_script = False
#------------------------------------------------------------------------------
# NotebookNotary(LoggingConfigurable) configuration
#------------------------------------------------------------------------------
## A class for computing and verifying notebook signatures.
## The hashing algorithm used to sign notebooks.
#c.NotebookNotary.algorithm = 'sha256'
## The storage directory for notary secret and database.
#c.NotebookNotary.data_dir = ''
## The sqlite file in which to store notebook signatures. By default, this will
# be in your Jupyter data directory. You can set it to ':memory:' to disable
# sqlite writing to the filesystem.
#c.NotebookNotary.db_file = ''
## The secret key with which notebooks are signed.
#c.NotebookNotary.secret = b''
## The file where the secret key is stored.
#c.NotebookNotary.secret_file = ''
## A callable returning the storage backend for notebook signatures. The default
# uses an SQLite database.
#c.NotebookNotary.store_factory = traitlets.Undefined
#------------------------------------------------------------------------------
# AsyncMultiKernelManager(MultiKernelManager) configuration
#------------------------------------------------------------------------------
## The kernel manager class. This is configurable to allow subclassing of the
# AsyncKernelManager for customized behavior.
#c.AsyncMultiKernelManager.kernel_manager_class = 'jupyter_client.ioloop.AsyncIOLoopKernelManager'
## Whether to make kernels available before the process has started. The kernel
# has a `.ready` future which can be awaited before connecting
#c.AsyncMultiKernelManager.use_pending_kernels = False
#------------------------------------------------------------------------------
# AsyncMappingKernelManager(MappingKernelManager,AsyncMultiKernelManager) configuration
#------------------------------------------------------------------------------
#------------------------------------------------------------------------------
# GatewayKernelManager(AsyncMappingKernelManager) configuration
#------------------------------------------------------------------------------
## Kernel manager that supports remote kernels hosted by Jupyter Kernel or
# Enterprise Gateway.
#------------------------------------------------------------------------------
# GatewayKernelSpecManager(KernelSpecManager) configuration
#------------------------------------------------------------------------------
#------------------------------------------------------------------------------
# GatewayClient(SingletonConfigurable) configuration
#------------------------------------------------------------------------------
## This class manages the configuration. It's its own singleton class so that we
# can share these values across all objects. It also contains some helper
# methods to build request arguments out of the various config options.
## The authorization token used in the HTTP headers. (JUPYTER_GATEWAY_AUTH_TOKEN
# env var)
#c.GatewayClient.auth_token = None
## The filename of CA certificates or None to use defaults.
# (JUPYTER_GATEWAY_CA_CERTS env var)
#c.GatewayClient.ca_certs = None
## The filename for client SSL certificate, if any. (JUPYTER_GATEWAY_CLIENT_CERT
# env var)
#c.GatewayClient.client_cert = None
## The filename for client SSL key, if any. (JUPYTER_GATEWAY_CLIENT_KEY env var)
#c.GatewayClient.client_key = None
## The time allowed for HTTP connection establishment with the Gateway server.
# (JUPYTER_GATEWAY_CONNECT_TIMEOUT env var)
#c.GatewayClient.connect_timeout = 40.0
## A comma-separated list of environment variable names that will be included,
# along with their values, in the kernel startup request. The corresponding
# `env_whitelist` configuration value must also be set on the Gateway server -
# since that configuration value indicates which environmental values to make
# available to the kernel. (JUPYTER_GATEWAY_ENV_WHITELIST env var)
#c.GatewayClient.env_whitelist = ''
## The time allowed for HTTP reconnection with the Gateway server for the first
# time. Next will be JUPYTER_GATEWAY_RETRY_INTERVAL multiplied by two in factor
# of numbers of retries but less than JUPYTER_GATEWAY_RETRY_INTERVAL_MAX.
# (JUPYTER_GATEWAY_RETRY_INTERVAL env var)
#c.GatewayClient.gateway_retry_interval = 1.0
## The maximum time allowed for HTTP reconnection retry with the Gateway server.
# (JUPYTER_GATEWAY_RETRY_INTERVAL_MAX env var)
#c.GatewayClient.gateway_retry_interval_max = 30.0
## The maximum retries allowed for HTTP reconnection with the Gateway server.
# (JUPYTER_GATEWAY_RETRY_MAX env var)
#c.GatewayClient.gateway_retry_max = 5
## Additional HTTP headers to pass on the request. This value will be converted
# to a dict. (JUPYTER_GATEWAY_HEADERS env var)
#c.GatewayClient.headers = '{}'
## The password for HTTP authentication. (JUPYTER_GATEWAY_HTTP_PWD env var)
#c.GatewayClient.http_pwd = None
## The username for HTTP authentication. (JUPYTER_GATEWAY_HTTP_USER env var)
#c.GatewayClient.http_user = None
## The gateway API endpoint for accessing kernel resources
# (JUPYTER_GATEWAY_KERNELS_ENDPOINT env var)
#c.GatewayClient.kernels_endpoint = '/api/kernels'
## The gateway API endpoint for accessing kernelspecs
# (JUPYTER_GATEWAY_KERNELSPECS_ENDPOINT env var)
#c.GatewayClient.kernelspecs_endpoint = '/api/kernelspecs'
## The gateway endpoint for accessing kernelspecs resources
# (JUPYTER_GATEWAY_KERNELSPECS_RESOURCE_ENDPOINT env var)
#c.GatewayClient.kernelspecs_resource_endpoint = '/kernelspecs'
## The time allowed for HTTP request completion. (JUPYTER_GATEWAY_REQUEST_TIMEOUT
# env var)
#c.GatewayClient.request_timeout = 40.0
## The url of the Kernel or Enterprise Gateway server where kernel specifications
# are defined and kernel management takes place. If defined, this Notebook
# server acts as a proxy for all kernel management and kernel specification
# retrieval. (JUPYTER_GATEWAY_URL env var)
#c.GatewayClient.url = None
## For HTTPS requests, determines if server's certificate should be validated or
# not. (JUPYTER_GATEWAY_VALIDATE_CERT env var)
#c.GatewayClient.validate_cert = True
## The websocket url of the Kernel or Enterprise Gateway server. If not
# provided, this value will correspond to the value of the Gateway url with 'ws'
# in place of 'http'. (JUPYTER_GATEWAY_WS_URL env var)
#c.GatewayClient.ws_url = None
#------------------------------------------------------------------------------
# TerminalManager(LoggingConfigurable,NamedTermManager) configuration
#------------------------------------------------------------------------------
##
## Timeout (in seconds) after which a terminal is considered inactive and ready
# to be culled. Values of 0 or lower disable culling.
#c.TerminalManager.cull_inactive_timeout = 0
## The interval (in seconds) on which to check for terminals exceeding the
# inactive timeout value.
#c.TerminalManager.cull_interval = 300
# Config overrides
c.FileContentsManager.delete_to_trash = False
c.NotebookApp.notebook_dir = '/data/jupyter'
c.NotebookApp.ip = '*'
c.NotebookApp.allow_root = True
c.NotebookApp.open_browser = True
c.NotebookApp.password = u'sha1:4f96fe6d1725:2e8650de1b1b7dbae474c7de97b3b5484f2fe537'  # paste the hash produced by notebook.auth.passwd() (see the sketch below)
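# A minimal sketch of regenerating that hash from a Python shell (assumption:
# classic notebook, where notebook.auth is available):
#   from notebook.auth import passwd
#   passwd()   # prompts twice and prints a string of the form 'sha1:<salt>:<hash>'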

3
jupyter/conf/pip.conf Normal file
View File

@ -0,0 +1,3 @@
[global]
index-url = http://mirrors.tencentyun.com/pypi/simple
trusted-host = mirrors.tencentyun.com

25
jupyter/shell/build.sh Normal file
View File

@ -0,0 +1,25 @@
#!/bin/bash
mkdir -p /data/jupyter
yum update -y && \
yum upgrade -y && \
yum install -y \
sudo \
wget && \
yum clean all && rm -rf /var/lib/apt/lists/*
cat ./config/miniconda.bashrc >> /root/.bashrc
wget -c https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh &&\
chmod 777 Miniconda3-latest-Linux-x86_64.sh && \
bash Miniconda3-latest-Linux-x86_64.sh -b -f && \
rm -f Miniconda3-latest-Linux-x86_64.sh && \
source ~/.bashrc
conda config --add channels https://mirrors.tuna.tsinghua.edu.cn/anaconda/pkgs/free/ &&\
conda config --set show_channel_urls yes &&\
conda install jupyter notebook -y && jupyter notebook --generate-config
mkdir -p /root/.pip && cp ./config/pip.conf /root/.pip/pip.conf
cp ./config/jupyter_notebook_config.py /root/.jupyter/jupyter_notebook_config.py
cp ./config/jupyter.service /etc/systemd/system/jupyter.service
systemctl enable jupyter.service
systemctl start jupyter.service
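# optional sanity check (assumption: this script runs on a systemd host, not in a plain container):
# systemctl status jupyter.service --no-pager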

6
jupyter/shell/start.sh Normal file
View File

@ -0,0 +1,6 @@
#!/bin/sh
source ~/.bashrc
echo "start success jupyter-notebook"
#nohub jupyter notebook --allow-root > /data/jupyter.log 2>&1
jupyter notebook
echo "exist jupyter-notebook ? why?"

26
miniconda/Dockerfile Normal file
View File

@ -0,0 +1,26 @@
FROM centos:7
USER root
ENV ROOT_DIR /root/docker
WORKDIR ${ROOT_DIR}
SHELL ["/bin/bash", "-o", "pipefail", "-c"]
RUN yum update -y && \
yum upgrade -y && \
yum install -y \
sudo \
git \
wget && \
yum clean all && rm -rf /var/lib/apt/lists/*
COPY tini ${ROOT_DIR}/tini
RUN cp ${ROOT_DIR}/tini /tini && chmod +x /tini
RUN wget -c https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh &&\
chmod 777 Miniconda3-latest-Linux-x86_64.sh
RUN bash Miniconda3-latest-Linux-x86_64.sh -b -f -p ${ROOT_DIR}/miniconda3 && \
rm -f Miniconda3-latest-Linux-x86_64.sh
ENV PATH=${ROOT_DIR}/miniconda3/bin:${PATH}
CMD tail -f /dev/null
COPY shell ${ROOT_DIR}
RUN cat ${ROOT_DIR}/miniconda.bashrc >> ~/.bashrc && source ~/.bashrc && bash shell/env.sh
ENTRYPOINT ["/tini", "--"] CMD

View File

@ -0,0 +1,18 @@
# yaml configuration
version: '3'
networks:
  nginx:
    external: false
services:
  miniconda:
    build: .
    image: ouczbs:miniconda
    container_name: miniconda
    networks:
      - nginx
    ports:
      - "9999:8888"
    volumes:
      - /root:/root
      #- /root/docker_env:/root/docker/env
      - /data:/data
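# A typical way to build and start this service (assumption: a Compose-v2 capable Docker CLI):
#   docker compose up -d --build miniconda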

3
miniconda/shell/env.sh Normal file
View File

@ -0,0 +1,3 @@
git config --global user.email ouczbs@qq.com
git config --global user.name ouczbs
git config --global push.default simple

View File

@ -0,0 +1,14 @@
# >>> conda initialize >>>
# !! Contents within this block are managed by 'conda init' !!
__conda_setup="$('/root/docker/miniconda3/bin/conda' 'shell.bash' 'hook' 2> /dev/null)"
if [ $? -eq 0 ]; then
    eval "$__conda_setup"
else
    if [ -f "/root/docker/miniconda3/etc/profile.d/conda.sh" ]; then
        . "/root/docker/miniconda3/etc/profile.d/conda.sh"
    else
        export PATH="/root/docker/miniconda3/bin:$PATH"
    fi
fi
unset __conda_setup
# <<< conda initialize <<<

BIN
miniconda/tini Normal file

Binary file not shown.

25
nginx/conf.d/anki.conf Normal file
View File

@ -0,0 +1,25 @@
server {
    listen 27443 ssl;
    server_name anki.ssl;
    # Configuration managed by Certbot
    ssl_certificate /etc/nginx/ssl/certificate.crt;
    ssl_certificate_key /etc/nginx/ssl/private.key;
    #include /etc/letsencrypt/options-ssl-nginx.conf;
    location / {
        proxy_http_version 1.0;
        proxy_pass http://anki:27001/;
        client_max_body_size 222M;
    }
}
server {
    listen 27001;
    server_name anki;
    location / {
        proxy_http_version 1.0;
        proxy_pass http://anki:27001/;
        # proxy_set_header anki-original-size $http_Content_Length;
        client_max_body_size 222M;
    }
}

45
nginx/conf.d/default.conf Normal file
View File

@ -0,0 +1,45 @@
server {
    listen 80;
    listen [::]:80;
    server_name localhost;
    #access_log /var/log/nginx/host.access.log main;
    location / {
        root /usr/share/nginx/html;
        index index.html index.htm;
    }
    #error_page 404 /404.html;
    # redirect server error pages to the static page /50x.html
    #
    error_page 500 502 503 504 /50x.html;
    location = /50x.html {
        root /usr/share/nginx/html;
    }
    # proxy the PHP scripts to Apache listening on 127.0.0.1:80
    #
    #location ~ \.php$ {
    #    proxy_pass http://127.0.0.1;
    #}
    # pass the PHP scripts to FastCGI server listening on 127.0.0.1:9000
    #
    #location ~ \.php$ {
    #    root html;
    #    fastcgi_pass 127.0.0.1:9000;
    #    fastcgi_index index.php;
    #    fastcgi_param SCRIPT_FILENAME /scripts$fastcgi_script_name;
    #    include fastcgi_params;
    #}
    # deny access to .htaccess files, if Apache's document root
    # concurs with nginx's one
    #
    #location ~ /\.ht {
    #    deny all;
    #}
}

View File

@ -0,0 +1,3 @@
4AD9A9952F81E85A940C3EEA033DD0B263D9297571CCD22E4F1C25EF1C3C250A
comodoca.com
59c62c69bb8f050

19
nginx/html/50x.html Normal file
View File

@ -0,0 +1,19 @@
<!DOCTYPE html>
<html>
<head>
<title>Error</title>
<style>
html { color-scheme: light dark; }
body { width: 35em; margin: 0 auto;
font-family: Tahoma, Verdana, Arial, sans-serif; }
</style>
</head>
<body>
<h1>An error occurred.</h1>
<p>Sorry, the page you are looking for is currently unavailable.<br/>
Please try again later.</p>
<p>If you are the system administrator of this resource then you should check
the error log for details.</p>
<p><em>Faithfully yours, nginx.</em></p>
</body>
</html>

23
nginx/html/index.html Normal file
View File

@ -0,0 +1,23 @@
<!DOCTYPE html>
<html>
<head>
<title>Welcome to nginx!</title>
<style>
html { color-scheme: light dark; }
body { width: 35em; margin: 0 auto;
font-family: Tahoma, Verdana, Arial, sans-serif; }
</style>
</head>
<body>
<h1>Welcome to nginx!</h1>
<p>If you see this page, the nginx web server is successfully installed and
working. Further configuration is required.</p>
<p>For online documentation and support please refer to
<a href="http://nginx.org/">nginx.org</a>.<br/>
Commercial support is available at
<a href="http://nginx.com/">nginx.com</a>.</p>
<p><em>Thank you for using nginx.</em></p>
</body>
</html>

31
nginx/nginx.conf Normal file
View File

@ -0,0 +1,31 @@
user nginx;
worker_processes auto;
error_log /var/log/nginx/error.log notice;
pid /var/run/nginx.pid;
events {
    worker_connections 1024;
}
http {
    include /etc/nginx/mime.types;
    default_type application/octet-stream;
    log_format main '$remote_addr - $remote_user [$time_local] "$request" '
                    '$status $body_bytes_sent "$http_referer" '
                    '"$http_user_agent" "$http_x_forwarded_for"';
    access_log /var/log/nginx/access.log main;
    sendfile on;
    #tcp_nopush on;
    keepalive_timeout 65;
    #gzip on;
    include /etc/nginx/conf.d/*.conf;
}
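# After editing these files, a syntax check plus reload is usually enough
# (assumption: run wherever this nginx instance is running, e.g. inside its container):
#   nginx -t && nginx -s reload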

39
nginx/ssl/ca_bundle.crt Normal file
View File

@ -0,0 +1,39 @@
-----BEGIN CERTIFICATE-----
MIIG1TCCBL2gAwIBAgIQbFWr29AHksedBwzYEZ7WvzANBgkqhkiG9w0BAQwFADCB
iDELMAkGA1UEBhMCVVMxEzARBgNVBAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0pl
cnNleSBDaXR5MR4wHAYDVQQKExVUaGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNV
BAMTJVVTRVJUcnVzdCBSU0EgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMjAw
MTMwMDAwMDAwWhcNMzAwMTI5MjM1OTU5WjBLMQswCQYDVQQGEwJBVDEQMA4GA1UE
ChMHWmVyb1NTTDEqMCgGA1UEAxMhWmVyb1NTTCBSU0EgRG9tYWluIFNlY3VyZSBT
aXRlIENBMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAhmlzfqO1Mdgj
4W3dpBPTVBX1AuvcAyG1fl0dUnw/MeueCWzRWTheZ35LVo91kLI3DDVaZKW+TBAs
JBjEbYmMwcWSTWYCg5334SF0+ctDAsFxsX+rTDh9kSrG/4mp6OShubLaEIUJiZo4
t873TuSd0Wj5DWt3DtpAG8T35l/v+xrN8ub8PSSoX5Vkgw+jWf4KQtNvUFLDq8mF
WhUnPL6jHAADXpvs4lTNYwOtx9yQtbpxwSt7QJY1+ICrmRJB6BuKRt/jfDJF9Jsc
RQVlHIxQdKAJl7oaVnXgDkqtk2qddd3kCDXd74gv813G91z7CjsGyJ93oJIlNS3U
gFbD6V54JMgZ3rSmotYbz98oZxX7MKbtCm1aJ/q+hTv2YK1yMxrnfcieKmOYBbFD
hnW5O6RMA703dBK92j6XRN2EttLkQuujZgy+jXRKtaWMIlkNkWJmOiHmErQngHvt
iNkIcjJumq1ddFX4iaTI40a6zgvIBtxFeDs2RfcaH73er7ctNUUqgQT5rFgJhMmF
x76rQgB5OZUkodb5k2ex7P+Gu4J86bS15094UuYcV09hVeknmTh5Ex9CBKipLS2W
2wKBakf+aVYnNCU6S0nASqt2xrZpGC1v7v6DhuepyyJtn3qSV2PoBiU5Sql+aARp
wUibQMGm44gjyNDqDlVp+ShLQlUH9x8CAwEAAaOCAXUwggFxMB8GA1UdIwQYMBaA
FFN5v1qqK0rPVIDh2JvAnfKyA2bLMB0GA1UdDgQWBBTI2XhootkZaNU9ct5fCj7c
tYaGpjAOBgNVHQ8BAf8EBAMCAYYwEgYDVR0TAQH/BAgwBgEB/wIBADAdBgNVHSUE
FjAUBggrBgEFBQcDAQYIKwYBBQUHAwIwIgYDVR0gBBswGTANBgsrBgEEAbIxAQIC
TjAIBgZngQwBAgEwUAYDVR0fBEkwRzBFoEOgQYY/aHR0cDovL2NybC51c2VydHJ1
c3QuY29tL1VTRVJUcnVzdFJTQUNlcnRpZmljYXRpb25BdXRob3JpdHkuY3JsMHYG
CCsGAQUFBwEBBGowaDA/BggrBgEFBQcwAoYzaHR0cDovL2NydC51c2VydHJ1c3Qu
Y29tL1VTRVJUcnVzdFJTQUFkZFRydXN0Q0EuY3J0MCUGCCsGAQUFBzABhhlodHRw
Oi8vb2NzcC51c2VydHJ1c3QuY29tMA0GCSqGSIb3DQEBDAUAA4ICAQAVDwoIzQDV
ercT0eYqZjBNJ8VNWwVFlQOtZERqn5iWnEVaLZZdzxlbvz2Fx0ExUNuUEgYkIVM4
YocKkCQ7hO5noicoq/DrEYH5IuNcuW1I8JJZ9DLuB1fYvIHlZ2JG46iNbVKA3ygA
Ez86RvDQlt2C494qqPVItRjrz9YlJEGT0DrttyApq0YLFDzf+Z1pkMhh7c+7fXeJ
qmIhfJpduKc8HEQkYQQShen426S3H0JrIAbKcBCiyYFuOhfyvuwVCFDfFvrjADjd
4jX1uQXd161IyFRbm89s2Oj5oU1wDYz5sx+hoCuh6lSs+/uPuWomIq3y1GDFNafW
+LsHBU16lQo5Q2yh25laQsKRgyPmMpHJ98edm6y2sHUabASmRHxvGiuwwE25aDU0
2SAeepyImJ2CzB80YG7WxlynHqNhpE7xfC7PzQlLgmfEHdU+tHFeQazRQnrFkW2W
kqRGIq7cKRnyypvjPMkjeiV9lRdAM9fSJvsB3svUuu1coIG1xxI1yegoGM4r5QP4
RGIVvYaiI76C0djoSbQ/dkIUUXQuB8AL5jyH34g3BZaaXyvpmnV4ilppMXVAnAYG
ON51WhJ6W0xNdNJwzYASZYH+tmCWI+N60Gv2NNMGHwMZ7e9bXgzUCZH5FaBFDGR5
S9VWqHB73Q+OyIVvIbKYcSc2w/aSuFKGSA==
-----END CERTIFICATE-----

76
nginx/ssl/certificate.crt Normal file
View File

@ -0,0 +1,76 @@
-----BEGIN CERTIFICATE-----
MIIGYTCCBEmgAwIBAgIRAPOdfP8KPWbj7PaT3hIThhgwDQYJKoZIhvcNAQEMBQAw
SzELMAkGA1UEBhMCQVQxEDAOBgNVBAoTB1plcm9TU0wxKjAoBgNVBAMTIVplcm9T
U0wgUlNBIERvbWFpbiBTZWN1cmUgU2l0ZSBDQTAeFw0yMzA3MjgwMDAwMDBaFw0y
MzEwMjYyMzU5NTlaMBkxFzAVBgNVBAMTDjE3NS4yNC4yMjYuMTE0MIIBIjANBgkq
hkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAnSFKy+RnaE08PZvstijR0QyLvHAJmo8G
wBpvgZCnHYexwROIfNCqXfANiP/fLRfOGEuF6xAS6+dF8jFNVLhKJgt3uFkxOmEH
iQzgU0WSuJerKIW+pOelq3sY+6i340lYyi6lStOwxJD99ZjH1BOS9rb6tS892giC
lj7zNZhYUMjmphIk2s5tC0/bII8PsivYki6AVbHGjUmj16S0ocw9ZmZgT6oddC3f
/OKfPXgGDkur+k1xZ1Tm7rj+Nkiy3OlhwF9DiAI5dvRL1Wfgmsfj9ReFCNd28WKH
kf6ChjDtyog2AOtDFI0/Ox7lONMNCcx31kasSyX1LlMYrJPIqQiYfQIDAQABo4IC
cDCCAmwwHwYDVR0jBBgwFoAUyNl4aKLZGWjVPXLeXwo+3LWGhqYwHQYDVR0OBBYE
FEfl7XXWxd8stOzi6tE7axcJMtfZMA4GA1UdDwEB/wQEAwIFoDAMBgNVHRMBAf8E
AjAAMB0GA1UdJQQWMBQGCCsGAQUFBwMBBggrBgEFBQcDAjBJBgNVHSAEQjBAMDQG
CysGAQQBsjEBAgJOMCUwIwYIKwYBBQUHAgEWF2h0dHBzOi8vc2VjdGlnby5jb20v
Q1BTMAgGBmeBDAECATCBiAYIKwYBBQUHAQEEfDB6MEsGCCsGAQUFBzAChj9odHRw
Oi8vemVyb3NzbC5jcnQuc2VjdGlnby5jb20vWmVyb1NTTFJTQURvbWFpblNlY3Vy
ZVNpdGVDQS5jcnQwKwYIKwYBBQUHMAGGH2h0dHA6Ly96ZXJvc3NsLm9jc3Auc2Vj
dGlnby5jb20wggEEBgorBgEEAdZ5AgQCBIH1BIHyAPAAdwCt9776fP8QyIudPZwe
PhhqtGcpXc+xDCTKhYY069yCigAAAYmcozZ8AAAEAwBIMEYCIQDMyC96A9+P8HbY
bIGM3u5fNse5Y/p+14Ikph3zBC9h2gIhAKyamlg3yEGQxYVBDcp2VGpCo1xQMMax
zj7pYXBaHZm9AHUAejKMVNi3LbYg6jjgUh7phBZwMhOFTTvSK8E6V6NS61IAAAGJ
nKM22AAABAMARjBEAiBljToFaTXUvqq67iNIx/l6nPNnM0c4H1YHhLQ7SObuhwIg
GozBAd529d20avuN35dBaN5spHg+7g1P9BYg14tVTHowDwYDVR0RBAgwBocErxji
cjANBgkqhkiG9w0BAQwFAAOCAgEANgJwO5t/uJ/kKTxZRZZv9u4C62U2EPq05ABZ
Zyry7F3/HnsepvmvQlSh4SYPu2CJq8oF9hn0+nC9yHxNeAwrjl/B7KpYjKGCBGc7
gGqMhbkQTB0d3khHTKV50T6WE6AMGODIAfClrbOdYr2oY6939MOcTmlsyeCCNlFu
aNZ/VFzIffL8ErphNu7Y/V3/YRZujOmOXJn4nryenrHikUysFwPld0fbMtUvmE7h
Yog9xzqVSFDLo0lUPc2km97DBrvqixWdGZ/12cqtlpR1g5NdPtw6CScFub7ZRAsi
SJIYD2fXj+6Q+fz31oRlGtLJQW6exwmPC6+kq+JA0no2gOw/5/bYRYu4gfxYCNk6
BqS+T1J9uthilITaRVCrvfUJ+/t2dj1tGAdnYQLLWF3QCAfO1xI7fgIldXik0HQz
z3PJY9jcMGuDNoPeJ89cLLJ4ywXXK/maMKyKI1/AD2g5flcM0BLaPpoO/Q4QoXCZ
P7YFmLF4hulZtkuu1V5WjG2nzfyA/Cd2rXn4k44f7QjV1Bcw4JqvXOj9+6yZQHr+
/MSOLf1yj0cXI43jliEzf/TtkI4zu1h8gJeNTQwDgjA4mxfp0L45Uu3YVFOG4W/f
Bt9FL3gRLYzJwDG93YO2OesyE5jOCl56oPDevmIHkbIsARqndRpvD7xZ/gg36anG
k0VFl1k=
-----END CERTIFICATE-----
-----BEGIN CERTIFICATE-----
MIIG1TCCBL2gAwIBAgIQbFWr29AHksedBwzYEZ7WvzANBgkqhkiG9w0BAQwFADCB
iDELMAkGA1UEBhMCVVMxEzARBgNVBAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0pl
cnNleSBDaXR5MR4wHAYDVQQKExVUaGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNV
BAMTJVVTRVJUcnVzdCBSU0EgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMjAw
MTMwMDAwMDAwWhcNMzAwMTI5MjM1OTU5WjBLMQswCQYDVQQGEwJBVDEQMA4GA1UE
ChMHWmVyb1NTTDEqMCgGA1UEAxMhWmVyb1NTTCBSU0EgRG9tYWluIFNlY3VyZSBT
aXRlIENBMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAhmlzfqO1Mdgj
4W3dpBPTVBX1AuvcAyG1fl0dUnw/MeueCWzRWTheZ35LVo91kLI3DDVaZKW+TBAs
JBjEbYmMwcWSTWYCg5334SF0+ctDAsFxsX+rTDh9kSrG/4mp6OShubLaEIUJiZo4
t873TuSd0Wj5DWt3DtpAG8T35l/v+xrN8ub8PSSoX5Vkgw+jWf4KQtNvUFLDq8mF
WhUnPL6jHAADXpvs4lTNYwOtx9yQtbpxwSt7QJY1+ICrmRJB6BuKRt/jfDJF9Jsc
RQVlHIxQdKAJl7oaVnXgDkqtk2qddd3kCDXd74gv813G91z7CjsGyJ93oJIlNS3U
gFbD6V54JMgZ3rSmotYbz98oZxX7MKbtCm1aJ/q+hTv2YK1yMxrnfcieKmOYBbFD
hnW5O6RMA703dBK92j6XRN2EttLkQuujZgy+jXRKtaWMIlkNkWJmOiHmErQngHvt
iNkIcjJumq1ddFX4iaTI40a6zgvIBtxFeDs2RfcaH73er7ctNUUqgQT5rFgJhMmF
x76rQgB5OZUkodb5k2ex7P+Gu4J86bS15094UuYcV09hVeknmTh5Ex9CBKipLS2W
2wKBakf+aVYnNCU6S0nASqt2xrZpGC1v7v6DhuepyyJtn3qSV2PoBiU5Sql+aARp
wUibQMGm44gjyNDqDlVp+ShLQlUH9x8CAwEAAaOCAXUwggFxMB8GA1UdIwQYMBaA
FFN5v1qqK0rPVIDh2JvAnfKyA2bLMB0GA1UdDgQWBBTI2XhootkZaNU9ct5fCj7c
tYaGpjAOBgNVHQ8BAf8EBAMCAYYwEgYDVR0TAQH/BAgwBgEB/wIBADAdBgNVHSUE
FjAUBggrBgEFBQcDAQYIKwYBBQUHAwIwIgYDVR0gBBswGTANBgsrBgEEAbIxAQIC
TjAIBgZngQwBAgEwUAYDVR0fBEkwRzBFoEOgQYY/aHR0cDovL2NybC51c2VydHJ1
c3QuY29tL1VTRVJUcnVzdFJTQUNlcnRpZmljYXRpb25BdXRob3JpdHkuY3JsMHYG
CCsGAQUFBwEBBGowaDA/BggrBgEFBQcwAoYzaHR0cDovL2NydC51c2VydHJ1c3Qu
Y29tL1VTRVJUcnVzdFJTQUFkZFRydXN0Q0EuY3J0MCUGCCsGAQUFBzABhhlodHRw
Oi8vb2NzcC51c2VydHJ1c3QuY29tMA0GCSqGSIb3DQEBDAUAA4ICAQAVDwoIzQDV
ercT0eYqZjBNJ8VNWwVFlQOtZERqn5iWnEVaLZZdzxlbvz2Fx0ExUNuUEgYkIVM4
YocKkCQ7hO5noicoq/DrEYH5IuNcuW1I8JJZ9DLuB1fYvIHlZ2JG46iNbVKA3ygA
Ez86RvDQlt2C494qqPVItRjrz9YlJEGT0DrttyApq0YLFDzf+Z1pkMhh7c+7fXeJ
qmIhfJpduKc8HEQkYQQShen426S3H0JrIAbKcBCiyYFuOhfyvuwVCFDfFvrjADjd
4jX1uQXd161IyFRbm89s2Oj5oU1wDYz5sx+hoCuh6lSs+/uPuWomIq3y1GDFNafW
+LsHBU16lQo5Q2yh25laQsKRgyPmMpHJ98edm6y2sHUabASmRHxvGiuwwE25aDU0
2SAeepyImJ2CzB80YG7WxlynHqNhpE7xfC7PzQlLgmfEHdU+tHFeQazRQnrFkW2W
kqRGIq7cKRnyypvjPMkjeiV9lRdAM9fSJvsB3svUuu1coIG1xxI1yegoGM4r5QP4
RGIVvYaiI76C0djoSbQ/dkIUUXQuB8AL5jyH34g3BZaaXyvpmnV4ilppMXVAnAYG
ON51WhJ6W0xNdNJwzYASZYH+tmCWI+N60Gv2NNMGHwMZ7e9bXgzUCZH5FaBFDGR5
S9VWqHB73Q+OyIVvIbKYcSc2w/aSuFKGSA==
-----END CERTIFICATE-----

27
nginx/ssl/private.key Normal file
View File

@ -0,0 +1,27 @@
-----BEGIN RSA PRIVATE KEY-----
MIIEogIBAAKCAQEAnSFKy+RnaE08PZvstijR0QyLvHAJmo8GwBpvgZCnHYexwROI
fNCqXfANiP/fLRfOGEuF6xAS6+dF8jFNVLhKJgt3uFkxOmEHiQzgU0WSuJerKIW+
pOelq3sY+6i340lYyi6lStOwxJD99ZjH1BOS9rb6tS892giClj7zNZhYUMjmphIk
2s5tC0/bII8PsivYki6AVbHGjUmj16S0ocw9ZmZgT6oddC3f/OKfPXgGDkur+k1x
Z1Tm7rj+Nkiy3OlhwF9DiAI5dvRL1Wfgmsfj9ReFCNd28WKHkf6ChjDtyog2AOtD
FI0/Ox7lONMNCcx31kasSyX1LlMYrJPIqQiYfQIDAQABAoIBAAVIKDaAYAV+iybb
BfL9nkfzrwLPlVMPub9dZCAr16JDfHMsD+t6LC/qMNpJF5xxb2kbqM3FBY3OgWhp
xdqyMzPiMM0OmnwG5PTzbNr/vLXdy1AaZtUuQEBqa+WRv1Sxk32AYegJA6kn4pGN
6DG10rveXMEYbHDW1mTsispja5yTho8xOXhXjpavwhCo/qk0yNO8USGrZceCMtRv
9ILdHaktBpUATNEOtvbcrUivRbgFZp95kvuOFOQECzbpHOc23fG4SDekMbLalR0A
uxR4zIIeX9wKBVq7XMf7CF0DhA1M6c+tk+bGuuNuhBROgx99UvUOkZec9DZMNYz3
Jxjc68ECgYEA8PMN1FtAG3+HtTnkYNlkE9H8J1M48vVN+jILhD36cmHVMgcgRHbE
jUodpsvmfwTGx3KHSVbxyKwVB7Rbm6SFRWAnIFidM5IP1nz7o0jvGrREtmarWWbX
T7J3UInKmUdtmQZ+hjwUUourbjkMd7hAzzt70cyD0kqN0MK1FqOZHk0CgYEApvHo
smS1mpxb36od69THEUUAoJf3cCIJkWb9IaclSJoLG6/5ir4EkhROqBqVFTz77Zva
ybC+K9hSAvJf7E1zbnb4HZ294WZts1S47OHW0B5wu4ccRAioynv2P1gZgsh7g+zm
Vk+934kObFQwKfHbUs0MUVn3GkLkykaFGAwsWvECgYAjuKTwOqcCbKG96FtHUHNZ
zOPJILlsq5ln4kt6y4V5t69Xv161R7ZUDAzhUZFWe2rM3tnXsaAe1AQ6JC8AoB9x
7ZNfZEPKBXl7nwPfbUyldNxK/mWg13JMSnUN55+1obVJ3oCbfgS1SCeeK68BgLt6
/aKZs+xHHO5xaKxYMuDccQKBgGWz17PeAHhpKtsuMr45yPAvXjt3eMk6IyQOXsVC
o0cAeSXr5ZTtP7GSZ3ynfQ0MMtnE6duXLbNePFAJQgCeCmVQWsUIFOYc1VW7llDR
IGF3Oew0WjbkI5f/PrvGOvxekEiFaKlKOPzV7RB3lUPoUFm4CvoAWbYI7IKBxZEp
0VexAoGAJltL5eu3yrcXRO5iCThT3DYsmqR16JpaMyFDtLdDw1fE3wU44+bLjQZs
napP5bwl+uEwPTkbdSVHzlDYQkw9eosmPKQaediFfhA4naOieRlsI03cIShTdKjc
KsHSDmOkl89MAcU0PZXaQju6heAHsOjX6OghaKhJ7QBsj7BQqLg=
-----END RSA PRIVATE KEY-----

79
readme.ipynb Normal file
View File

@ -0,0 +1,79 @@
{
"cells": [
{
"cell_type": "code",
"execution_count": null,
"id": "56638891",
"metadata": {},
"outputs": [],
"source": [
"#build\n",
"docker build -t miniconda:ouczbs . \n",
"docker build -t centos:ouczbs ."
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "5b0a7431",
"metadata": {},
"outputs": [],
"source": [
"#run\n",
"docker run -p 80:80 -it gitlab:ouczbs bash\n",
"docker run -p 8888:8888 -v /data:/data -d --restart=always miniconda:ouczbs\n",
"docker run -p 8888:8888 -it miniconda:ouczbs bash\n",
"docker run -p 8888:8888 -it --restart=always miniconda:ouczbs bash\n",
"docker run -p 8888:8888 -it --restart=always miniconda:ouczbs source ~/.bashrc\n",
" \n",
"docker run --detach \\\n",
" --name hostname \\\n",
" --hostname 175.24.226.114 \\\n",
" -p 1443:443 \\\n",
" -p 180:80 \\\n",
" -p 122:22 \\\n",
" -v /home/docker/gitlab/config:/etc/gitlab \\\n",
" -v /home/docker/gitlab/logs:/var/log/gitlab \\\n",
" -v /home/docker/gitlab/data:/var/opt/gitlab \\\n",
" --restart always \\\n",
" gitlab/gitlab-ce:latest\n",
" \n",
"docker run --name jupyter-rust -d -p 8899:8899 -v `pwd`:/opt/notebooks junsuzuki/jupyter-rust"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "4b4f22ab",
"metadata": {},
"outputs": [],
"source": [
"#rm\n",
"docker rm -v $(docker ps -aq -f status=exited)\n",
"docker system prune -a\n",
"docker images|grep none|awk '{print $3}'|xargs docker rmi "
]
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3 (ipykernel)",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.11.4"
}
},
"nbformat": 4,
"nbformat_minor": 5
}

22
rust/Dockerfile Normal file
View File

@ -0,0 +1,22 @@
FROM --platform=linux/amd64 rust:1.65.0
# System packages
ENV ROOT_DIR /root/docker
COPY --from=ouczbs:miniconda ${ROOT_DIR}/miniconda3 ${ROOT_DIR}/miniconda3
# configure & update conda
USER root
ENV PATH=${ROOT_DIR}/miniconda3/bin:${PATH}
COPY conf/pip.conf /root/.pip/pip.conf
RUN conda init bash && \
conda update -y conda && \
conda install -c anaconda cmake -y && \
conda install -y nb_conda_kernels jupyterlab
# install evcxr_jupyter
## by setting GIT_FETCH_WITH_CLI option,
## Cargo will use the git executable to fetch registry indexes and git dependencies
COPY conf/config.conf /root/.cargo/config
#RUN cargo install evcxr_jupyter && evcxr_jupyter --install
#CMD ["jupyter", "lab", "--ip=0.0.0.0", "--port=8899", "--notebook-dir=/opt/notebooks", "--allow-root", "--no-browser"]
CMD tail -f /dev/null

13
rust/conf/config.conf Normal file
View File

@ -0,0 +1,13 @@
[source.crates-io]
registry = "https://github.com/rust-lang/crates.io-index"
replace-with = 'tuna'
[source.tuna]
registry = "https://mirrors.tuna.tsinghua.edu.cn/git/crates.io-index.git"
#replace-with = 'ustc'
#[source.ustc]
#registry = "git://mirrors.ustc.edu.cn/crates.io-index"
[net]
git-fetch-with-cli = true
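# Quick check (assumption: any Cargo project is available): `cargo fetch -v` should show
# the registry index being pulled from mirrors.tuna.tsinghua.edu.cn rather than github.com.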

3
rust/conf/pip.conf Normal file
View File

@ -0,0 +1,3 @@
[global]
index-url = http://mirrors.tencentyun.com/pypi/simple
trusted-host = mirrors.tencentyun.com

13
rust/docker-compose.yml Normal file
View File

@ -0,0 +1,13 @@
# rust:
# build: ./rust
# image: ouczbs:jupyter-rust
# container_name: rust
# restart: always
# ports:
# - "8899:8899"
# networks:
# - nginx
# volumes:
# - /root/docker:/opt/notebooks

4
ssh/Dockerfile Normal file
View File

@ -0,0 +1,4 @@
FROM centos:7
RUN yum install -y epel-release && yum install -y certbot
RUN certbot register -m ouczbs@qq.com --agree-tos --no-eff-email -n
#need dns server
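# A hedged example of issuing a certificate once DNS resolves to this host
# (example.com is a placeholder, not a domain used in this repo):
#   certbot certonly --standalone -d example.com -m ouczbs@qq.com --agree-tos -n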

1
ssh/authorized_keys Normal file
View File

@ -0,0 +1 @@
ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQDO+7XyE5l4AwyVqWSE7Z4xyfdcyJfoj/Oebx7qCRKP3qFotRazWgW23ecNKl837klVLR1VKDIXWRDFY+zE9gZbKPrIlxJ9hqk9HvZhppBmbbtazPXsA/ZlSdqD5n0mrmxDm6WJOZ75MHRwHUA8Uy0p8Sd1rbqna/gNOwHMwlfo7974Fq2BX8h3FpQ5Cggrr4EJY5IpWO4xgpSy95tRX8GIqdNxMWQWdODdI1atFBxkbfQr2Cy4GmxIy8hGDQAF5AXu06cQsyyVkL9odtFLMDBkhxIo55PfYyWfQ6zkfAYOeDrJs0LbCKickbsy0Vavrvz8NZbcjx6117Wg6SNQ1o9x0NR/gxMeF5/MT+F/tPi7tlXxhjyHnCjeMsxjoWa3TTQ6i0/VQIBbjcdQi9QK4HYDhaHrRKhqqqGD21HCthuzGVr+AWJohc0XYqSWDgUTMFY7aQheEEgoWrIMW2aBzdfnF8sHR23aWSMDX2ExV5pHKWgEzDoDlf8q9X/g4EKzboM= ouczbs@qq.com

1
ssh/id_rsa.pub Normal file
View File

@ -0,0 +1 @@
ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQDO+7XyE5l4AwyVqWSE7Z4xyfdcyJfoj/Oebx7qCRKP3qFotRazWgW23ecNKl837klVLR1VKDIXWRDFY+zE9gZbKPrIlxJ9hqk9HvZhppBmbbtazPXsA/ZlSdqD5n0mrmxDm6WJOZ75MHRwHUA8Uy0p8Sd1rbqna/gNOwHMwlfo7974Fq2BX8h3FpQ5Cggrr4EJY5IpWO4xgpSy95tRX8GIqdNxMWQWdODdI1atFBxkbfQr2Cy4GmxIy8hGDQAF5AXu06cQsyyVkL9odtFLMDBkhxIo55PfYyWfQ6zkfAYOeDrJs0LbCKickbsy0Vavrvz8NZbcjx6117Wg6SNQ1o9x0NR/gxMeF5/MT+F/tPi7tlXxhjyHnCjeMsxjoWa3TTQ6i0/VQIBbjcdQi9QK4HYDhaHrRKhqqqGD21HCthuzGVr+AWJohc0XYqSWDgUTMFY7aQheEEgoWrIMW2aBzdfnF8sHR23aWSMDX2ExV5pHKWgEzDoDlf8q9X/g4EKzboM= ouczbs@qq.com