enable_chunked_transfer_encoding_without_browser

This commit is contained in:
dobefore 2021-09-08 19:51:16 +08:00 committed by Vikash Kothary
parent 251df33e4e
commit 1c21733ef2
3 changed files with 211 additions and 110 deletions

View File

@ -41,6 +41,21 @@ class Syncer(object):
self.col = col
self.server = server
# new added functions related to Syncer:
# these are removed from latest anki module
########################################################################
def scm(self):
    """Return the collection's schema modification value (`scm` column of col)."""
    return self.col.db.scalar("select scm from col")
def increment_usn(self):
    """Bump the collection's update sequence number (usn) by one in the db."""
    sql = "update col set usn = usn + 1"
    self.col.db.execute(sql)
def set_modified_time(self, now: int):
    """Store `now` as the collection modification time (col.mod)."""
    query = "update col set mod=?"
    self.col.db.execute(query, now)
def set_last_sync(self, now: int):
    """Store `now` as the collection's last-sync timestamp (col.ls)."""
    query = "update col set ls = ?"
    self.col.db.execute(query, now)
#########################################################################
def meta(self):
return dict(
mod=self.col.mod,
@ -66,7 +81,7 @@ class Syncer(object):
# then the other objects
self.mergeModels(rchg['models'])
self.mergeDecks(rchg['decks'])
self.mergeTags(rchg['tags'])
# self.mergeTags(rchg['tags'])
if 'conf' in rchg:
self.mergeConf(rchg['conf'])
# this was left out of earlier betas
@ -105,25 +120,25 @@ select id from notes where mid = ?) limit 1"""
return False
return True
def sanityCheck(self, full):
if not self.basicCheck():
return "failed basic check"
for t in "cards", "notes", "revlog", "graves":
if self.col.db.scalar(
"select count() from %s where usn = -1" % t):
return "%s had usn = -1" % t
for g in self.col.decks.all():
if g['usn'] == -1:
return "deck had usn = -1"
for t, usn in self.allItems():
if usn == -1:
return "tag had usn = -1"
found = False
for m in self.col.models.all():
if m['usn'] == -1:
return "model had usn = -1"
if found:
self.col.models.save()
def sanityCheck(self):
# basicCheck() seems to have no effect on this procedure,
# if necessary remove comment
# if not self.basicCheck():
# return "failed basic check"
tables=["cards",
"notes",
"revlog",
"graves",
"decks",
"deck_config",
"tags",
"notetypes",
]
for tb in tables:
print(self.col.db.scalar(f'select null from {tb} where usn=-1'))
if self.col.db.scalar(f'select null from {tb} where usn=-1'):
return f'table had usn=-1: {tb}'
self.col.sched.reset()
# check for missing parent decks
#self.col.sched.deckDueList()
@ -142,13 +157,15 @@ select id from notes where mid = ?) limit 1"""
def usnLim(self):
    """WHERE-clause fragment selecting rows not yet assigned a server usn."""
    pending_clause = "usn = -1"
    return pending_clause
def finish(self, mod=None):
self.col.ls = mod
self.col._usn = self.maxUsn + 1
def finish(self, now=None):
if now is not None:
# ensure we save the mod time even if no changes made
self.col.db.mod = True
self.col.save(mod=mod)
return mod
self.set_modified_time(now)
self.set_last_sync(now)
self.increment_usn()
self.col.save()
# `now` is expected to always be provided here (never None)
return now
# Chunked syncing
##########################################################################
@ -195,67 +212,26 @@ from notes where %s""" % lim, self.maxUsn)
# Deletions
##########################################################################
def removed(self):
    """Collect local deletions pending sync and claim them.

    Reads all grave rows with usn == -1, buckets the object ids by grave
    type, then stamps those rows with self.maxUsn so they are not
    reported again on the next sync.

    Returns a dict {'cards': [...], 'notes': [...], 'decks': [...]}.
    """
    cards = []
    notes = []
    decks = []
    curs = self.col.db.execute(
        "select oid, type from graves where usn = -1")
    for oid, type in curs:
        if type == REM_CARD:
            cards.append(oid)
        elif type == REM_NOTE:
            notes.append(oid)
        else:
            # any other type is treated as a deck grave
            # (presumably REM_DECK — no other types visible here)
            decks.append(oid)
    # mark these graves as synced at maxUsn so later calls skip them
    self.col.db.execute("update graves set usn=? where usn=-1",
                        self.maxUsn)
    return dict(cards=cards, notes=notes, decks=decks)
def remove(self, graves):
# remove card and the card's orphaned notes
def add_grave(self, ids: List[int], type: int, usn: int):
    """Insert one grave row per object id, tagged with `type` and `usn`.

    The (oid, type, usn) column order must match the graves table
    schema; verify with 'pragma table_info(graves)' and the schema
    version via 'select ver from col'.
    """
    rows = [(oid, type, usn) for oid in ids]
    self.col.db.executemany(
        "INSERT OR IGNORE INTO graves (oid, type, usn) VALUES (?, ?, ?)",
        rows)
def apply_graves(self, graves,latest_usn: int):
# remove card and the card's orphaned notes
self.col.remove_cards_and_orphaned_notes(graves['cards'])
self.add_grave(graves['cards'], REM_CARD,latest_usn)
# only notes
self.col.remove_notes(graves['notes'])
self.add_grave(graves['notes'], REM_NOTE,latest_usn)
# since level 0 deck ,we only remove deck ,but backend will delete child,it is ok, the delete
# will have once effect
for oid in graves['decks']:
self.col.decks.rem(oid)
# we can place non-exist grave after above delete.
localgcards = []
localgnotes = []
localgdecks = []
curs = self.col.db.execute(
"select oid, type from graves where usn = %d" % self.col.usn())
for oid, type in curs:
if type == REM_CARD:
localgcards.append(oid)
elif type == REM_NOTE:
localgnotes.append(oid)
else:
localgdecks.append(oid)
# the "n" prefix marks graves that do not yet exist on the server compared to the client
ncards = [ oid for oid in graves['cards'] if oid not in localgcards]
for oid in ncards:
self.col._logRem([oid], REM_CARD)
nnotes = [ oid for oid in graves['notes'] if oid not in localgnotes]
for oid in nnotes:
self.col._logRem([oid], REM_NOTE)
ndecks = [ oid for oid in graves['decks'] if oid not in localgdecks]
for oid in ndecks:
self.col._logRem([oid], REM_DECK)
self.col.decks.remove(graves['decks'])
self.add_grave(graves['decks'], REM_DECK,latest_usn)
# Models
##########################################################################

View File

@ -33,11 +33,12 @@ from sqlite3 import dbapi2 as sqlite
from webob import Response
from webob.dec import wsgify
from webob.exc import *
import urllib.parse
from anki.collection import Collection
import anki.db
import anki.utils
from anki.consts import REM_CARD, REM_NOTE
from ankisyncd.full_sync import get_full_sync_manager
from ankisyncd.sessions import get_session_manager
from ankisyncd.sync import Syncer, SYNC_VER, SYNC_ZIP_SIZE, SYNC_ZIP_COUNT
@ -97,8 +98,8 @@ class SyncCollectionHandler(Syncer):
return {
'mod': self.col.mod,
'scm': self.col.scm,
'usn': self.col._usn,
'scm': self.scm(),
'usn': self.col.usn(),
'ts': anki.utils.intTime(),
'musn': self.col.media.lastUsn(),
'uname': self.session.name,
@ -117,19 +118,20 @@ class SyncCollectionHandler(Syncer):
# Since the V2 scheduler has not been thoroughly tested yet, we leave these comments here
# and simply enable the V2 scheduler in the server code.
self.maxUsn = self.col._usn
self.maxUsn = self.col.usn()
self.minUsn = minUsn
self.lnewer = not lnewer
# fetch local/server graves
lgraves = self.removed()
# convert grave:None to {'cards': [], 'notes': [], 'decks': []}
# because req.POST['data'] returned value of grave is None
if graves==None:
graves={'cards': [], 'notes': [], 'decks': []}
self.remove(graves)
# handle AnkiDroid using old protocol
# Client graves are only non-None when operations such as deck deletion
# were performed on AnkiDroid.
if graves is not None:
self.apply_graves(graves,self.maxUsn)
return lgraves
def applyGraves(self, chunk):
self.remove(chunk)
self.apply_graves(chunk,self.maxUsn)
def applyChanges(self, changes):
self.rchg = changes
@ -138,8 +140,8 @@ class SyncCollectionHandler(Syncer):
self.mergeChanges(lchg, self.rchg)
return lchg
def sanityCheck2(self, client, full=None):
server = self.sanityCheck(full)
def sanityCheck2(self, client):
server = self.sanityCheck()
if client != server:
logger.info(
f"sanity check failed with server: {server} client: {client}"
@ -148,7 +150,7 @@ class SyncCollectionHandler(Syncer):
return dict(status="bad", c=client, s=server)
return dict(status="ok")
def finish(self, mod=None):
def finish(self):
return super().finish(anki.utils.intTime(1000))
# This function had to be put here in its entirety because Syncer.removed()
@ -178,7 +180,7 @@ class SyncCollectionHandler(Syncer):
def getDecks(self):
return [
[g for g in self.col.decks.all() if g['usn'] >= self.minUsn],
[g for g in self.col.decks.allConf() if g['usn'] >= self.minUsn]
[g for g in self.col.decks.all_config() if g['usn'] >= self.minUsn]
]
def getTags(self):
@ -338,7 +340,6 @@ class SyncMediaHandler:
if lastUsn < server_lastUsn or lastUsn == 0:
for fname,usn,csum, in self.col.media.changes(lastUsn):
result.append([fname, usn, csum])
# anki assumes server_lastUsn == result[-1][1]
# ref: anki/sync.py:720 (commit cca3fcb2418880d0430a5c5c2e6b81ba260065b7)
result.reverse()
@ -394,7 +395,125 @@ class SyncUserSession:
# for inactivity and then later re-open it (creating a new Collection object).
handler.col = col
return handler
class Requests(object):
    '''Parse a WSGI request from the client into a plain dict.

    Handles three body styles: URL query strings (GET), regular
    multipart/form-data POST bodies, and chunked transfer-encoded
    multipart bodies (requests with no Content-Length header).
    '''
    def __init__(self, environ: dict):
        # raw WSGI environ plus the query string, kept for later parsing
        self.query_string = environ['QUERY_STRING']
        self.environ = environ
        # placeholder; not populated by this class itself
        self.data = None

    @property
    def path(self):
        """The request path, e.g. '/' or '/favicon.ico'."""
        return self.environ['PATH_INFO']

    @property
    def parse_request(self):
        '''Return a dict containing all the variables from a form request.

        Returns None when there is no input stream and nothing matched,
        the string 'empty body' for an empty POST body, or a dict mapping
        field names (str) to values — text fields decoded to str, the
        'data' field kept as bytes.
        '''
        env = self.environ
        content_len = env.get('CONTENT_LENGTH', '0')
        # NOTE(review): `input` shadows the builtin of the same name
        input = env.get('wsgi.input')
        length = 0 if content_len == '' else int(content_len)
        body = b''
        d = {}
        if length == 0:
            # no Content-Length: chunked upload, GET, or a bare page hit
            if input is None:
                return
            if env.get('HTTP_TRANSFER_ENCODING', '0') == 'chunked':
                # de-chunk: each chunk is "<hex size>\r\n<data>\r\n",
                # terminated by a zero-size chunk
                bd = b''
                size = int(input.readline(), 16)
                while size > 0:
                    bd += (input.read(size + 2)).strip()
                    size = int(input.readline(), 16)
                # NOTE(review): re.findall returns a *list*, but re.split
                # expects a pattern — this looks like it would raise
                # TypeError; verify against a real chunked client.
                repeat = re.findall(b'^(.*?)Content-Disposition: form-data; name="data"', bd, re.MULTILINE)
                items = re.split(repeat, bd)
                # del first ,last item
                items.pop()
                items.pop(0)
                for item in items:
                    if b'name="data"' in item:
                        # NOTE(review): bytes.strip() strips a *character
                        # set* from both ends, not a prefix string —
                        # confirm this is intended.
                        dt = item.strip(b'Content-Disposition: form-data; name="data"; filename="data"')
                        d['data'] = dt
                        continue
                    key = re.findall(b'name="(.*?)"', item)[0].decode('utf-8')
                    v = item[item.rfind(b'"') + 1:].decode('utf-8')
                    d[key] = v
                return d
            if self.query_string != '':
                # GET method: parse the query string, then collapse each
                # value list produced by parse_qs into a single string
                body = self.query_string
                d = urllib.parse.parse_qs(body)
                for k, v in d.items():
                    d[k] = ''.join(v)
                return d
            # request server with web server (plain browser hit)
            if self.path == '/':
                d = {'url': b'Anki Sync Server'}
                return d
            if self.path == '/favicon.ico':
                d = {'url': b''}
                return d
        else:
            body = env['wsgi.input'].read(length)
            if body is None or body == b'':
                return 'empty body'
            # process body to dict: split the multipart payload on its
            # boundary (assumed to be the first line of the body)
            repeat = body.splitlines()[0]
            items = re.split(repeat, body)
            # del first ,last item
            items.pop()
            items.pop(0)
            for item in items:
                if b'name="data"' in item:
                    bt = None
                    # remove \r\n
                    if b'application/octet-stream' in item:
                        # Ankidroid case: an extra Content-Type header
                        # precedes the payload
                        item = re.sub(b'Content-Disposition: form-data; name="data"; filename="data"', b'', item)
                        item = re.sub(b'Content-Type: application/octet-stream', b'', item)
                        bt = item.strip()
                    else:
                        # PKzip file stream and others
                        item = re.sub(b'Content-Disposition: form-data; name="data"; filename="data"', b'', item)
                        bt = item.strip()
                    d['data'] = bt
                    continue
                item = re.sub(b'\r\n', b'', item, flags=re.MULTILINE)
                key = re.findall(b'name="(.*?)"', item)[0].decode('utf-8')
                v = item[item.rfind(b'"') + 1:].decode('utf-8')
                d[key] = v
            return d

    @property
    def params(self):
        """
        A dictionary-like object containing both the parameters from
        the query string and request body.
        """
        r = self.parse_request
        if r is None:
            return 'POST or GET is None'
        else:
            params = MultiDict(r)
            return params
class MultiDict(object):
    """Read-only view over several dicts, searched in order.

    Lookup returns the value from the first dict containing the key and
    raises KeyError only when *no* dict has it.
    """

    def __init__(self, *dicts):
        """Store the dicts to search; each argument must be a dict.

        Raises:
            TypeError: if any argument is not a dict.
        """
        for d in dicts:
            if not isinstance(d, dict):
                raise TypeError(d)
        self.dicts = dicts

    def __getitem__(self, key):
        """Return the first value found for `key`, searching dicts in order.

        Bug fix: the previous version re-raised KeyError as soon as the
        *first* dict lacked the key, so later dicts were never consulted.
        Now the search continues through all dicts before failing.
        """
        for d in self.dicts:
            try:
                return d[key]
            except KeyError:
                continue
        raise KeyError(key)
class SyncApp:
valid_urls = SyncCollectionHandler.operations + SyncMediaHandler.operations + ['hostKey', 'upload', 'download']
@ -467,11 +586,12 @@ class SyncApp:
# local copy in Anki
return self.full_sync_manager.download(col, session)
@wsgify
def __call__(self, req):
def __call__(self, env,start_resp):
req=Requests(env)
p=req.params
# Get and verify the session
try:
hkey = req.params['k']
hkey = p['k']
except KeyError:
hkey = None
@ -479,18 +599,18 @@ class SyncApp:
if session is None:
try:
skey = req.POST['sk']
skey = p['sk']
session = self.session_manager.load_from_skey(skey, self.create_session)
except KeyError:
skey = None
try:
compression = int(req.POST['c'])
compression = int(p['c'])
except KeyError:
compression = 0
try:
data = req.POST['data'].file.read()
data = p['data']
data = self._decode_data(data, compression)
except KeyError:
data = {}
@ -503,7 +623,8 @@ class SyncApp:
if url == 'hostKey':
result = self.operation_hostKey(data.get("u"), data.get("p"))
if result:
return json.dumps(result)
resp=Response(json.dumps(result))
return resp(env,start_resp)
else:
# TODO: do I have to pass 'null' for the client to receive None?
raise HTTPForbidden('null')
@ -529,17 +650,20 @@ class SyncApp:
if type(result) not in (str, bytes, Response):
result = json.dumps(result)
return result
resp=Response(result)
return resp(env,start_resp)
elif url == 'upload':
thread = session.get_thread()
result = thread.execute(self.operation_upload, [data['data'], session])
return result
resp=Response(json.dumps(result))
return resp(env,start_resp)
elif url == 'download':
thread = session.get_thread()
result = thread.execute(self.operation_download, [session])
return result
resp=Response(result)
return resp(env,start_resp)
# This was one of our operations but it didn't get handled... Oops!
raise HTTPInternalServerError()
@ -563,9 +687,10 @@ class SyncApp:
if type(result) not in (str, bytes):
result = json.dumps(result)
return result
return "Anki Sync Server"
resp=Response(result)
return resp(env,start_resp)
resp=Response(p['url'])
return resp(env,start_resp)
@staticmethod
def _execute_handler_method_in_thread(method_name, keyword_args, session):
@ -631,5 +756,5 @@ def main():
finally:
shutdown()
if __name__ == '__main__':
if __name__ == '__main__':
main()

0
src/ankisyncd_cli/migrate_user_tables.py Normal file → Executable file
View File