enable_chunked_transfer_encoding_without_browser

This commit is contained in:
dobefore 2021-09-08 19:51:16 +08:00 committed by Vikash Kothary
parent 251df33e4e
commit 1c21733ef2
3 changed files with 211 additions and 110 deletions

View File

@ -41,6 +41,21 @@ class Syncer(object):
self.col = col self.col = col
self.server = server self.server = server
# new added functions related to Syncer:
# these are removed from latest anki module
########################################################################
def scm(self):
    """Return the collection's schema number (the ``scm`` column of ``col``)."""
    return self.col.db.scalar("select scm from col")
def increment_usn(self):
    """Bump the collection-level update sequence number (``col.usn``) by one."""
    sql = "update col set usn = usn + 1"
    self.col.db.execute(sql)
def set_modified_time(self, now: int):
    """Persist *now* as the collection modification time (``col.mod``)."""
    sql = "update col set mod=?"
    self.col.db.execute(sql, now)
def set_last_sync(self, now: int):
    """Persist *now* as the last-sync timestamp (``col.ls``)."""
    sql = "update col set ls = ?"
    self.col.db.execute(sql, now)
#########################################################################
def meta(self): def meta(self):
return dict( return dict(
mod=self.col.mod, mod=self.col.mod,
@ -66,7 +81,7 @@ class Syncer(object):
# then the other objects # then the other objects
self.mergeModels(rchg['models']) self.mergeModels(rchg['models'])
self.mergeDecks(rchg['decks']) self.mergeDecks(rchg['decks'])
self.mergeTags(rchg['tags']) # self.mergeTags(rchg['tags'])
if 'conf' in rchg: if 'conf' in rchg:
self.mergeConf(rchg['conf']) self.mergeConf(rchg['conf'])
# this was left out of earlier betas # this was left out of earlier betas
@ -105,25 +120,25 @@ select id from notes where mid = ?) limit 1"""
return False return False
return True return True
def sanityCheck(self, full): def sanityCheck(self):
if not self.basicCheck(): # basicCheck() seems to have no effect on this procedure,
return "failed basic check" # if necessary remove comment
for t in "cards", "notes", "revlog", "graves": # if not self.basicCheck():
if self.col.db.scalar( # return "failed basic check"
"select count() from %s where usn = -1" % t): tables=["cards",
return "%s had usn = -1" % t "notes",
for g in self.col.decks.all(): "revlog",
if g['usn'] == -1: "graves",
return "deck had usn = -1" "decks",
for t, usn in self.allItems(): "deck_config",
if usn == -1: "tags",
return "tag had usn = -1" "notetypes",
found = False ]
for m in self.col.models.all(): for tb in tables:
if m['usn'] == -1: print(self.col.db.scalar(f'select null from {tb} where usn=-1'))
return "model had usn = -1" if self.col.db.scalar(f'select null from {tb} where usn=-1'):
if found: return f'table had usn=-1: {tb}'
self.col.models.save()
self.col.sched.reset() self.col.sched.reset()
# check for missing parent decks # check for missing parent decks
#self.col.sched.deckDueList() #self.col.sched.deckDueList()
@ -142,13 +157,15 @@ select id from notes where mid = ?) limit 1"""
def usnLim(self): def usnLim(self):
return "usn = -1" return "usn = -1"
def finish(self, mod=None): def finish(self, now=None):
self.col.ls = mod if now is not None:
self.col._usn = self.maxUsn + 1
# ensure we save the mod time even if no changes made # ensure we save the mod time even if no changes made
self.col.db.mod = True self.set_modified_time(now)
self.col.save(mod=mod) self.set_last_sync(now)
return mod self.increment_usn()
self.col.save()
# now is None not happen
return now
# Chunked syncing # Chunked syncing
########################################################################## ##########################################################################
@ -195,67 +212,26 @@ from notes where %s""" % lim, self.maxUsn)
# Deletions # Deletions
########################################################################## ##########################################################################
def removed(self): def add_grave(self, ids: List[int], type: int,usn: int):
cards = [] items=[(id,type,usn) for id in ids]
notes = [] # make sure table graves fields order and schema version match
decks = [] # query sql1='pragma table_info(graves)' version query schema='select ver from col'
self.col.db.executemany(
curs = self.col.db.execute( "INSERT OR IGNORE INTO graves (oid, type, usn) VALUES (?, ?, ?)" ,
"select oid, type from graves where usn = -1") items)
for oid, type in curs: def apply_graves(self, graves,latest_usn: int):
if type == REM_CARD: # remove card and the card's orphaned notes
cards.append(oid)
elif type == REM_NOTE:
notes.append(oid)
else:
decks.append(oid)
self.col.db.execute("update graves set usn=? where usn=-1",
self.maxUsn)
return dict(cards=cards, notes=notes, decks=decks)
def remove(self, graves):
# remove card and the card's orphaned notes
self.col.remove_cards_and_orphaned_notes(graves['cards']) self.col.remove_cards_and_orphaned_notes(graves['cards'])
self.add_grave(graves['cards'], REM_CARD,latest_usn)
# only notes # only notes
self.col.remove_notes(graves['notes']) self.col.remove_notes(graves['notes'])
self.add_grave(graves['notes'], REM_NOTE,latest_usn)
# since level 0 deck ,we only remove deck ,but backend will delete child,it is ok, the delete # since level 0 deck ,we only remove deck ,but backend will delete child,it is ok, the delete
# will have once effect # will have once effect
for oid in graves['decks']: self.col.decks.remove(graves['decks'])
self.col.decks.rem(oid) self.add_grave(graves['decks'], REM_DECK,latest_usn)
# we can place non-exist grave after above delete.
localgcards = []
localgnotes = []
localgdecks = []
curs = self.col.db.execute(
"select oid, type from graves where usn = %d" % self.col.usn())
for oid, type in curs:
if type == REM_CARD:
localgcards.append(oid)
elif type == REM_NOTE:
localgnotes.append(oid)
else:
localgdecks.append(oid)
# n meaning non-exsiting grave in the server compared to client
ncards = [ oid for oid in graves['cards'] if oid not in localgcards]
for oid in ncards:
self.col._logRem([oid], REM_CARD)
nnotes = [ oid for oid in graves['notes'] if oid not in localgnotes]
for oid in nnotes:
self.col._logRem([oid], REM_NOTE)
ndecks = [ oid for oid in graves['decks'] if oid not in localgdecks]
for oid in ndecks:
self.col._logRem([oid], REM_DECK)
# Models # Models
########################################################################## ##########################################################################

View File

@ -33,11 +33,12 @@ from sqlite3 import dbapi2 as sqlite
from webob import Response from webob import Response
from webob.dec import wsgify from webob.dec import wsgify
from webob.exc import * from webob.exc import *
import urllib.parse
from anki.collection import Collection
import anki.db import anki.db
import anki.utils import anki.utils
from anki.consts import REM_CARD, REM_NOTE from anki.consts import REM_CARD, REM_NOTE
from ankisyncd.full_sync import get_full_sync_manager from ankisyncd.full_sync import get_full_sync_manager
from ankisyncd.sessions import get_session_manager from ankisyncd.sessions import get_session_manager
from ankisyncd.sync import Syncer, SYNC_VER, SYNC_ZIP_SIZE, SYNC_ZIP_COUNT from ankisyncd.sync import Syncer, SYNC_VER, SYNC_ZIP_SIZE, SYNC_ZIP_COUNT
@ -97,8 +98,8 @@ class SyncCollectionHandler(Syncer):
return { return {
'mod': self.col.mod, 'mod': self.col.mod,
'scm': self.col.scm, 'scm': self.scm(),
'usn': self.col._usn, 'usn': self.col.usn(),
'ts': anki.utils.intTime(), 'ts': anki.utils.intTime(),
'musn': self.col.media.lastUsn(), 'musn': self.col.media.lastUsn(),
'uname': self.session.name, 'uname': self.session.name,
@ -117,19 +118,20 @@ class SyncCollectionHandler(Syncer):
# Since now have not thorougly test the V2 scheduler, we leave this comments here, and # Since now have not thorougly test the V2 scheduler, we leave this comments here, and
# just enable the V2 scheduler in the serve code. # just enable the V2 scheduler in the serve code.
self.maxUsn = self.col._usn self.maxUsn = self.col.usn()
self.minUsn = minUsn self.minUsn = minUsn
self.lnewer = not lnewer self.lnewer = not lnewer
# fetch local/server graves
lgraves = self.removed() lgraves = self.removed()
# convert grave:None to {'cards': [], 'notes': [], 'decks': []} # handle AnkiDroid using old protocol
# because req.POST['data'] returned value of grave is None # Only if Operations like deleting deck are performed on Ankidroid
if graves==None: # can (client) graves is not None
graves={'cards': [], 'notes': [], 'decks': []} if graves is not None:
self.remove(graves) self.apply_graves(graves,self.maxUsn)
return lgraves return lgraves
def applyGraves(self, chunk): def applyGraves(self, chunk):
self.remove(chunk) self.apply_graves(chunk,self.maxUsn)
def applyChanges(self, changes): def applyChanges(self, changes):
self.rchg = changes self.rchg = changes
@ -138,8 +140,8 @@ class SyncCollectionHandler(Syncer):
self.mergeChanges(lchg, self.rchg) self.mergeChanges(lchg, self.rchg)
return lchg return lchg
def sanityCheck2(self, client, full=None): def sanityCheck2(self, client):
server = self.sanityCheck(full) server = self.sanityCheck()
if client != server: if client != server:
logger.info( logger.info(
f"sanity check failed with server: {server} client: {client}" f"sanity check failed with server: {server} client: {client}"
@ -148,7 +150,7 @@ class SyncCollectionHandler(Syncer):
return dict(status="bad", c=client, s=server) return dict(status="bad", c=client, s=server)
return dict(status="ok") return dict(status="ok")
def finish(self, mod=None): def finish(self):
return super().finish(anki.utils.intTime(1000)) return super().finish(anki.utils.intTime(1000))
# This function had to be put here in its entirety because Syncer.removed() # This function had to be put here in its entirety because Syncer.removed()
@ -178,7 +180,7 @@ class SyncCollectionHandler(Syncer):
def getDecks(self): def getDecks(self):
return [ return [
[g for g in self.col.decks.all() if g['usn'] >= self.minUsn], [g for g in self.col.decks.all() if g['usn'] >= self.minUsn],
[g for g in self.col.decks.allConf() if g['usn'] >= self.minUsn] [g for g in self.col.decks.all_config() if g['usn'] >= self.minUsn]
] ]
def getTags(self): def getTags(self):
@ -338,7 +340,6 @@ class SyncMediaHandler:
if lastUsn < server_lastUsn or lastUsn == 0: if lastUsn < server_lastUsn or lastUsn == 0:
for fname,usn,csum, in self.col.media.changes(lastUsn): for fname,usn,csum, in self.col.media.changes(lastUsn):
result.append([fname, usn, csum]) result.append([fname, usn, csum])
# anki assumes server_lastUsn == result[-1][1] # anki assumes server_lastUsn == result[-1][1]
# ref: anki/sync.py:720 (commit cca3fcb2418880d0430a5c5c2e6b81ba260065b7) # ref: anki/sync.py:720 (commit cca3fcb2418880d0430a5c5c2e6b81ba260065b7)
result.reverse() result.reverse()
@ -394,7 +395,125 @@ class SyncUserSession:
# for inactivity and then later re-open it (creating a new Collection object). # for inactivity and then later re-open it (creating a new Collection object).
handler.col = col handler.col = col
return handler return handler
class Requests(object):
'''Parse the request message from a client (WSGI environ) into a dict of parameters.'''
def __init__(self,environ: dict):
# Raw CGI-style query string, e.g. "k=abc&v=..."; may be empty.
self.query_string=environ['QUERY_STRING']
self.environ=environ
# Placeholder only; never assigned elsewhere in this class.
self.data=None
@property
def path(self):
"""Request path from the WSGI environ, e.g. '/' or '/favicon.ico'."""
return self.environ['PATH_INFO']
@property
def parse_request(self):
'''Return a dict containing all the variables from the request.

Handles, in order:
* bodies sent with chunked transfer encoding (no Content-Length),
* GET requests (query string only),
* bare '/' and '/favicon.ico' requests,
* ordinary multipart POST bodies with a Content-Length.

The binary payload is stored under key 'data' as bytes; other form
fields are decoded to str. Returns None when there is no input
stream, and the string 'empty body' for a zero-byte POST body.
'''
env = self.environ
content_len= env.get('CONTENT_LENGTH', '0')
input = env.get('wsgi.input')
length = 0 if content_len == '' else int(content_len)
body=b''
d={}
if length == 0:
# No Content-Length header: either nothing to read, a chunked
# body, a plain GET, or a browser hitting '/'.
if input is None:
return
if env.get('HTTP_TRANSFER_ENCODING','0') == 'chunked':
# Decode chunked transfer encoding: each chunk is preceded by
# its size in hex on its own line; a size of 0 ends the body.
bd=b''
size = int(input.readline(),16)
while size > 0:
# read(size+2) also consumes the chunk's trailing CRLF.
bd += (input.read(size+2)).strip()
size = int(input.readline(),16)
# Capture the multipart boundary text that precedes the "data" part.
# NOTE(review): re.findall returns a LIST, but re.split expects a
# single pattern — as written this raises TypeError; presumably
# repeat[0] (re.escape'd) was intended. Confirm with a chunked client.
repeat=re.findall(b'^(.*?)Content-Disposition: form-data; name="data"',bd,re.MULTILINE)
items=re.split(repeat,bd)
# del first ,last item
items.pop()
items.pop(0)
for item in items:
if b'name="data"' in item:
# NOTE(review): bytes.strip() removes a SET of characters, not a
# prefix/suffix string — this can eat leading/trailing payload
# bytes that happen to be in that character set; verify.
dt=item.strip(b'Content-Disposition: form-data; name="data"; filename="data"')
d['data']=dt
continue
# Ordinary form field: field name from the Content-Disposition
# header, value is whatever follows the final '"'.
key=re.findall(b'name="(.*?)"',item)[0].decode('utf-8')
v=item[item.rfind(b'"')+1:].decode('utf-8')
d[key]=v
return d
if self.query_string !='':
# GET method
body=self.query_string
d=urllib.parse.parse_qs(body)
# parse_qs maps each key to a list of values; flatten to plain str.
for k,v in d.items():
d[k]=''.join(v)
return d
# request server with web server
if self.path=='/' :
d= {'url':b'Anki Sync Server'}
return d
if self.path=='/favicon.ico' :
d= {'url':b''}
return d
else:
# Content-Length present: read the whole multipart body at once.
body = env['wsgi.input'].read(length)
if body is None or body ==b'':
return 'empty body'
# process body to dict
# The first line of a multipart body is the boundary; split on it.
repeat=body.splitlines()[0]
items=re.split(repeat,body)
# del first ,last item
items.pop()
items.pop(0)
for item in items:
if b'name="data"' in item:
bt=None
# remove \r\n
if b'application/octet-stream' in item:
# Ankidroid case
item=re.sub(b'Content-Disposition: form-data; name="data"; filename="data"',b'',item)
item=re.sub(b'Content-Type: application/octet-stream',b'',item)
bt=item.strip()
else:
# PKzip file stream and others
item=re.sub(b'Content-Disposition: form-data; name="data"; filename="data"',b'',item)
bt=item.strip()
d['data']=bt
continue
item=re.sub(b'\r\n',b'',item,flags=re.MULTILINE)
key=re.findall(b'name="(.*?)"',item)[0].decode('utf-8')
v=item[item.rfind(b'"')+1:].decode('utf-8')
d[key]=v
return d
@property
def params(self):
"""
A dictionary-like object containing both the parameters from
the query string and request body.
"""
r=self.parse_request
if r is None :
# Degenerate request with no readable input stream at all.
return 'POST or GET is None'
else:
params = MultiDict(r)
return params
class MultiDict(object):
    """Read-only lookup view over one or more dicts, searched in order.

    ``md[key]`` returns the value from the first dict that contains
    ``key``; ``KeyError`` is raised only when no dict has it.
    """

    def __init__(self, *dicts):
        """Store *dicts* after verifying each argument is a real dict.

        Raises:
            TypeError: if any argument is not a dict.
        """
        for d in dicts:
            if not isinstance(d, dict):
                raise TypeError(d)
        self.dicts = dicts

    def __getitem__(self, key):
        """Return the first value found for *key* across the dicts.

        Bug fix: the original re-raised KeyError inside the loop as soon
        as the FIRST dict missed, so later dicts were never consulted.
        We now fall through to the next dict and only fail after all of
        them have been tried. (Single-dict behavior is unchanged.)
        """
        for d in self.dicts:
            try:
                return d[key]
            except KeyError:
                continue
        raise KeyError(key)
class SyncApp: class SyncApp:
valid_urls = SyncCollectionHandler.operations + SyncMediaHandler.operations + ['hostKey', 'upload', 'download'] valid_urls = SyncCollectionHandler.operations + SyncMediaHandler.operations + ['hostKey', 'upload', 'download']
@ -467,11 +586,12 @@ class SyncApp:
# local copy in Anki # local copy in Anki
return self.full_sync_manager.download(col, session) return self.full_sync_manager.download(col, session)
@wsgify def __call__(self, env,start_resp):
def __call__(self, req): req=Requests(env)
p=req.params
# Get and verify the session # Get and verify the session
try: try:
hkey = req.params['k'] hkey = p['k']
except KeyError: except KeyError:
hkey = None hkey = None
@ -479,18 +599,18 @@ class SyncApp:
if session is None: if session is None:
try: try:
skey = req.POST['sk'] skey = p['sk']
session = self.session_manager.load_from_skey(skey, self.create_session) session = self.session_manager.load_from_skey(skey, self.create_session)
except KeyError: except KeyError:
skey = None skey = None
try: try:
compression = int(req.POST['c']) compression = int(p['c'])
except KeyError: except KeyError:
compression = 0 compression = 0
try: try:
data = req.POST['data'].file.read() data = p['data']
data = self._decode_data(data, compression) data = self._decode_data(data, compression)
except KeyError: except KeyError:
data = {} data = {}
@ -503,7 +623,8 @@ class SyncApp:
if url == 'hostKey': if url == 'hostKey':
result = self.operation_hostKey(data.get("u"), data.get("p")) result = self.operation_hostKey(data.get("u"), data.get("p"))
if result: if result:
return json.dumps(result) resp=Response(json.dumps(result))
return resp(env,start_resp)
else: else:
# TODO: do I have to pass 'null' for the client to receive None? # TODO: do I have to pass 'null' for the client to receive None?
raise HTTPForbidden('null') raise HTTPForbidden('null')
@ -529,17 +650,20 @@ class SyncApp:
if type(result) not in (str, bytes, Response): if type(result) not in (str, bytes, Response):
result = json.dumps(result) result = json.dumps(result)
return result resp=Response(result)
return resp(env,start_resp)
elif url == 'upload': elif url == 'upload':
thread = session.get_thread() thread = session.get_thread()
result = thread.execute(self.operation_upload, [data['data'], session]) result = thread.execute(self.operation_upload, [data['data'], session])
return result resp=Response(json.dumps(result))
return resp(env,start_resp)
elif url == 'download': elif url == 'download':
thread = session.get_thread() thread = session.get_thread()
result = thread.execute(self.operation_download, [session]) result = thread.execute(self.operation_download, [session])
return result resp=Response(result)
return resp(env,start_resp)
# This was one of our operations but it didn't get handled... Oops! # This was one of our operations but it didn't get handled... Oops!
raise HTTPInternalServerError() raise HTTPInternalServerError()
@ -563,9 +687,10 @@ class SyncApp:
if type(result) not in (str, bytes): if type(result) not in (str, bytes):
result = json.dumps(result) result = json.dumps(result)
return result resp=Response(result)
return resp(env,start_resp)
return "Anki Sync Server" resp=Response(p['url'])
return resp(env,start_resp)
@staticmethod @staticmethod
def _execute_handler_method_in_thread(method_name, keyword_args, session): def _execute_handler_method_in_thread(method_name, keyword_args, session):
@ -631,5 +756,5 @@ def main():
finally: finally:
shutdown() shutdown()
if __name__ == '__main__': if __name__ == '__main__':
main() main()

0
src/ankisyncd_cli/migrate_user_tables.py Normal file → Executable file
View File