diff --git a/www/src/brython.js b/www/src/brython.js index d3bf6582a..7b88f8cd3 100644 --- a/www/src/brython.js +++ b/www/src/brython.js @@ -61,7 +61,7 @@ $B.regexIdentifier=/^(?:[\$A-Z_a-z\xAA\xB5\xBA\xC0-\xD6\xD8-\xF6\xF8-\u02C1\u02C __BRYTHON__.implementation=[3,3,1,'alpha',0] __BRYTHON__.__MAGIC__="3.3.1" __BRYTHON__.version_info=[3,3,0,'alpha',0] -__BRYTHON__.compiled_date="2017-02-05 18:37:22.864657" +__BRYTHON__.compiled_date="2017-02-05 21:43:42.821780" __BRYTHON__.builtin_module_names=["posix","sys","errno","time","_ajax","_base64","_jsre","_multiprocessing","_posixsubprocess","_profile","_svg","_sys","builtins","dis","hashlib","json","long_int","math","modulefinder","random","_abcoll","_codecs","_collections","_csv","_functools","_imp","_io","_random","_socket","_sre","_string","_struct","_sysconfigdata","_testcapi","_thread","_warnings","_weakref"] ;(function($B){var js,$pos,res,$op @@ -1069,9 +1069,8 @@ var new_node=new $Node() new $NodeJSCtx(new_node,js) make_args_nodes.push(new_node) if(this.type=='generator'){ -var new_node=new $Node() -new $NodeJSCtx(new_node,'for(var $var in $ns){$locals[$var]=$ns[$var]};') -make_args_nodes.push(new_node)} +js='for(var $var in $ns){$locals[$var]=$ns[$var]};' +make_args_nodes.push($NodeJS(js))} var only_positional=false if(this.other_args===null && this.other_kw===null && this.after_star.length==0 @@ -1114,32 +1113,25 @@ new $NodeJSCtx(def_func_node,'')}else{new $NodeJSCtx(def_func_node,'')} def_func_node.is_def_func=true def_func_node.module=this.module var last_instr=node.children[node.children.length-1].C.tree[0] -if(last_instr.type!=='return' && this.type!='generator'){node.add($NodeJS('$B.leave_frame($local_name);return None'))} +if(last_instr.type!=='return' && this.type!='generator'){ +node.add($NodeJS('$B.leave_frame($local_name);return None'))} node.add(def_func_node) var offset=1 var indent=node.indent -js=name+'.$infos = {' -var name_decl=new $Node() -new $NodeJSCtx(name_decl,js) -node.parent.insert(rank+offset++,name_decl) +node.parent.insert(rank+offset++,$NodeJS(name+'.$infos = {')) js=' __name__:"' if(this.scope.ntype=='class'){js+=this.scope.C.tree[0].name+'.'} js +=this.name+'",' -var name_decl=new $Node() -new $NodeJSCtx(name_decl,js) -node.parent.insert(rank+offset++,name_decl) +node.parent.insert(rank+offset++,$NodeJS(js)) var def_names=[] for(var i=0;i 0){except_node.add($NodeJS('$B.$profile.return()'))} except_node.add($NodeJS('$B.leave_frame($local_name);throw err')) parent.add(except_node)} this.transformed=true diff --git a/www/src/brython_dist.js b/www/src/brython_dist.js index 39483309b..9f51b470f 100644 --- a/www/src/brython_dist.js +++ b/www/src/brython_dist.js @@ -61,7 +61,7 @@ $B.regexIdentifier=/^(?:[\$A-Z_a-z\xAA\xB5\xBA\xC0-\xD6\xD8-\xF6\xF8-\u02C1\u02C __BRYTHON__.implementation=[3,3,1,'alpha',0] __BRYTHON__.__MAGIC__="3.3.1" __BRYTHON__.version_info=[3,3,0,'alpha',0] -__BRYTHON__.compiled_date="2017-02-05 18:37:22.864657" +__BRYTHON__.compiled_date="2017-02-05 21:43:42.821780" __BRYTHON__.builtin_module_names=["posix","sys","errno","time","_ajax","_base64","_jsre","_multiprocessing","_posixsubprocess","_profile","_svg","_sys","builtins","dis","hashlib","json","long_int","math","modulefinder","random","_abcoll","_codecs","_collections","_csv","_functools","_imp","_io","_random","_socket","_sre","_string","_struct","_sysconfigdata","_testcapi","_thread","_warnings","_weakref"] ;(function($B){var js,$pos,res,$op @@ -1069,9 +1069,8 @@ var new_node=new $Node() new $NodeJSCtx(new_node,js) 
make_args_nodes.push(new_node) if(this.type=='generator'){ -var new_node=new $Node() -new $NodeJSCtx(new_node,'for(var $var in $ns){$locals[$var]=$ns[$var]};') -make_args_nodes.push(new_node)} +js='for(var $var in $ns){$locals[$var]=$ns[$var]};' +make_args_nodes.push($NodeJS(js))} var only_positional=false if(this.other_args===null && this.other_kw===null && this.after_star.length==0 @@ -1114,32 +1113,25 @@ new $NodeJSCtx(def_func_node,'')}else{new $NodeJSCtx(def_func_node,'')} def_func_node.is_def_func=true def_func_node.module=this.module var last_instr=node.children[node.children.length-1].C.tree[0] -if(last_instr.type!=='return' && this.type!='generator'){node.add($NodeJS('$B.leave_frame($local_name);return None'))} +if(last_instr.type!=='return' && this.type!='generator'){ +node.add($NodeJS('$B.leave_frame($local_name);return None'))} node.add(def_func_node) var offset=1 var indent=node.indent -js=name+'.$infos = {' -var name_decl=new $Node() -new $NodeJSCtx(name_decl,js) -node.parent.insert(rank+offset++,name_decl) +node.parent.insert(rank+offset++,$NodeJS(name+'.$infos = {')) js=' __name__:"' if(this.scope.ntype=='class'){js+=this.scope.C.tree[0].name+'.'} js +=this.name+'",' -var name_decl=new $Node() -new $NodeJSCtx(name_decl,js) -node.parent.insert(rank+offset++,name_decl) +node.parent.insert(rank+offset++,$NodeJS(js)) var def_names=[] for(var i=0;i 0){except_node.add($NodeJS('$B.$profile.return()'))} except_node.add($NodeJS('$B.leave_frame($local_name);throw err')) parent.add(except_node)} this.transformed=true @@ -11014,5 +11007,5 @@ throw e;}}} return _sys_modules[_spec_name];} $B.import_hooks=import_hooks})(__BRYTHON__) __BRYTHON__.use_VFS = true; -__BRYTHON__.VFS={"base64": [".py", "#! /usr/bin/env python3\n\n\"\"\"RFC 3548: Base16, Base32, Base64 Data Encodings\"\"\"\n\n\n\n\n\n\n\n\n\nimport _base64\n\n__all__=[\n\n'encode','decode','encodebytes','decodebytes',\n\n'b64encode','b64decode','b32encode','b32decode',\n'b16encode','b16decode',\n\n'standard_b64encode','standard_b64decode',\n\n\n\n\n'urlsafe_b64encode','urlsafe_b64decode',\n]\n\n\nbytes_types=(bytes,bytearray)\n\ndef _bytes_from_decode_data(s):\n if isinstance(s,str):\n try :\n return s.encode('ascii')\n except UnicodeEncodeError:\n raise ValueError('string argument should contain only ASCII characters')\n elif isinstance(s,bytes_types):\n return s\n else :\n raise TypeError(\"argument should be bytes or ASCII string, not %s\"%s.__class__.__name__)\n \n \n \n \n \ndef b64encode(s,altchars=None ):\n ''\n\n\n\n\n\n\n\n \n if not isinstance(s,bytes_types):\n raise TypeError(\"expected bytes, not %s\"%s.__class__.__name__)\n if altchars is not None :\n if not isinstance(altchars,bytes_types):\n print('wrong altchars')\n raise TypeError(\"expected bytes, not %s\"\n %altchars.__class__.__name__)\n assert len(altchars)>=2,repr(altchars)\n return _base64.Base64.encode(s,altchars)\n \n \ndef b64decode(s,altchars=None ,validate=False ):\n ''\n\n\n\n\n\n\n\n\n\n\n\n \n if altchars is not None :\n altchars=_bytes_from_decode_data(altchars)\n assert len(altchars)==2,repr(altchars)\n s=s.translate(bytes.maketrans(altchars,b'+/'))\n return _base64.Base64.decode(s,altchars,validate)\n \n \ndef standard_b64encode(s):\n ''\n\n\n \n return b64encode(s)\n \ndef standard_b64decode(s):\n ''\n\n\n\n\n\n \n return b64decode(s)\n \n \n_urlsafe_encode_translation=bytes.maketrans(b'+/',b'-_')\n_urlsafe_decode_translation=bytes.maketrans(b'-_',b'+/')\n\ndef urlsafe_b64encode(s):\n ''\n\n\n\n\n \n return 
b64encode(s).translate(_urlsafe_encode_translation)\n \ndef urlsafe_b64decode(s):\n ''\n\n\n\n\n\n\n\n \n s=_bytes_from_decode_data(s)\n s=s.translate(_urlsafe_decode_translation)\n return b64decode(s)\n \n \n \n \n_b32alphabet={\n0:b'A',9:b'J',18:b'S',27:b'3',\n1:b'B',10:b'K',19:b'T',28:b'4',\n2:b'C',11:b'L',20:b'U',29:b'5',\n3:b'D',12:b'M',21:b'V',30:b'6',\n4:b'E',13:b'N',22:b'W',31:b'7',\n5:b'F',14:b'O',23:b'X',\n6:b'G',15:b'P',24:b'Y',\n7:b'H',16:b'Q',25:b'Z',\n8:b'I',17:b'R',26:b'2',\n}\n\n_b32tab=[v[0]for k,v in sorted(_b32alphabet.items())]\n_b32rev=dict([(v[0],k)for k,v in _b32alphabet.items()])\n\n\ndef b32encode(s):\n ''\n\n\n \n if not isinstance(s,bytes_types):\n raise TypeError(\"expected bytes, not %s\"%s.__class__.__name__)\n quanta,leftover=divmod(len(s),5)\n \n if leftover:\n s=s+bytes(5 -leftover)\n quanta +=1\n encoded=bytearray()\n for i in range(quanta):\n \n \n \n \n \n c1,c2,c3=struct.unpack('!HHB',s[i *5:(i+1)*5])\n c2 +=(c1&1)<<16\n c3 +=(c2&3)<<8\n encoded +=bytes([_b32tab[c1 >>11],\n _b32tab[(c1 >>6)&0x1f],\n _b32tab[(c1 >>1)&0x1f],\n _b32tab[c2 >>12],\n _b32tab[(c2 >>7)&0x1f],\n _b32tab[(c2 >>2)&0x1f],\n _b32tab[c3 >>5],\n _b32tab[c3&0x1f],\n ])\n \n if leftover ==1:\n encoded[-6:]=b'======'\n elif leftover ==2:\n encoded[-4:]=b'===='\n elif leftover ==3:\n encoded[-3:]=b'==='\n elif leftover ==4:\n encoded[-1:]=b'='\n return bytes(encoded)\n \n \ndef b32decode(s,casefold=False ,map01=None ):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n s=_bytes_from_decode_data(s)\n quanta,leftover=divmod(len(s),8)\n if leftover:\n raise binascii.Error('Incorrect padding')\n \n \n \n if map01 is not None :\n map01=_bytes_from_decode_data(map01)\n assert len(map01)==1,repr(map01)\n s=s.translate(bytes.maketrans(b'01',b'O'+map01))\n if casefold:\n s=s.upper()\n \n \n \n padchars=0\n mo=re.search(b'(?P[=]*)$',s)\n if mo:\n padchars=len(mo.group('pad'))\n if padchars >0:\n s=s[:-padchars]\n \n parts=[]\n acc=0\n shift=35\n for c in s:\n val=_b32rev.get(c)\n if val is None :\n raise binascii.Error('Non-base32 digit found')\n acc +=_b32rev[c]<'%(state,self._ov.address))\n return info\n \n def _cancel_overlapped(self):\n if self._ov is None :\n return\n try :\n self._ov.cancel()\n except OSError as exc:\n context={\n 'message':'Cancelling an overlapped future failed',\n 'exception':exc,\n 'future':self,\n }\n if self._source_traceback:\n context['source_traceback']=self._source_traceback\n self._loop.call_exception_handler(context)\n self._ov=None\n \n def cancel(self):\n self._cancel_overlapped()\n return super().cancel()\n \n def set_exception(self,exception):\n super().set_exception(exception)\n self._cancel_overlapped()\n \n def set_result(self,result):\n super().set_result(result)\n self._ov=None\n \n \nclass _BaseWaitHandleFuture(futures.Future):\n ''\n \n def __init__(self,ov,handle,wait_handle,*,loop=None ):\n super().__init__(loop=loop)\n if self._source_traceback:\n del self._source_traceback[-1]\n \n \n self._ov=ov\n self._handle=handle\n self._wait_handle=wait_handle\n \n \n \n self._registered=True\n \n def _poll(self):\n \n return (_winapi.WaitForSingleObject(self._handle,0)==\n _winapi.WAIT_OBJECT_0)\n \n def _repr_info(self):\n info=super()._repr_info()\n info.append('handle=%#x'%self._handle)\n if self._handle is not None :\n state='signaled'if self._poll()else'waiting'\n info.append(state)\n if self._wait_handle is not None :\n info.append('wait_handle=%#x'%self._wait_handle)\n return info\n \n def _unregister_wait_cb(self,fut):\n \n \n self._ov=None\n \n def 
_unregister_wait(self):\n if not self._registered:\n return\n self._registered=False\n \n wait_handle=self._wait_handle\n self._wait_handle=None\n try :\n _overlapped.UnregisterWait(wait_handle)\n except OSError as exc:\n if exc.winerror !=_overlapped.ERROR_IO_PENDING:\n context={\n 'message':'Failed to unregister the wait handle',\n 'exception':exc,\n 'future':self,\n }\n if self._source_traceback:\n context['source_traceback']=self._source_traceback\n self._loop.call_exception_handler(context)\n return\n \n \n self._unregister_wait_cb(None )\n \n def cancel(self):\n self._unregister_wait()\n return super().cancel()\n \n def set_exception(self,exception):\n self._unregister_wait()\n super().set_exception(exception)\n \n def set_result(self,result):\n self._unregister_wait()\n super().set_result(result)\n \n \nclass _WaitCancelFuture(_BaseWaitHandleFuture):\n ''\n\n \n \n def __init__(self,ov,event,wait_handle,*,loop=None ):\n super().__init__(ov,event,wait_handle,loop=loop)\n \n self._done_callback=None\n \n def cancel(self):\n raise RuntimeError(\"_WaitCancelFuture must not be cancelled\")\n \n def _schedule_callbacks(self):\n super(_WaitCancelFuture,self)._schedule_callbacks()\n if self._done_callback is not None :\n self._done_callback(self)\n \n \nclass _WaitHandleFuture(_BaseWaitHandleFuture):\n def __init__(self,ov,handle,wait_handle,proactor,*,loop=None ):\n super().__init__(ov,handle,wait_handle,loop=loop)\n self._proactor=proactor\n self._unregister_proactor=True\n self._event=_overlapped.CreateEvent(None ,True ,False ,None )\n self._event_fut=None\n \n def _unregister_wait_cb(self,fut):\n if self._event is not None :\n _winapi.CloseHandle(self._event)\n self._event=None\n self._event_fut=None\n \n \n \n \n \n \n \n \n self._proactor._unregister(self._ov)\n self._proactor=None\n \n super()._unregister_wait_cb(fut)\n \n def _unregister_wait(self):\n if not self._registered:\n return\n self._registered=False\n \n wait_handle=self._wait_handle\n self._wait_handle=None\n try :\n _overlapped.UnregisterWaitEx(wait_handle,self._event)\n except OSError as exc:\n if exc.winerror !=_overlapped.ERROR_IO_PENDING:\n context={\n 'message':'Failed to unregister the wait handle',\n 'exception':exc,\n 'future':self,\n }\n if self._source_traceback:\n context['source_traceback']=self._source_traceback\n self._loop.call_exception_handler(context)\n return\n \n \n self._event_fut=self._proactor._wait_cancel(self._event,\n self._unregister_wait_cb)\n \n \nclass PipeServer(object):\n ''\n\n\n \n def __init__(self,address):\n self._address=address\n self._free_instances=weakref.WeakSet()\n \n \n \n self._pipe=None\n self._accept_pipe_future=None\n self._pipe=self._server_pipe_handle(True )\n \n def _get_unconnected_pipe(self):\n \n \n \n \n tmp,self._pipe=self._pipe,self._server_pipe_handle(False )\n return tmp\n \n def _server_pipe_handle(self,first):\n \n if self.closed():\n return None\n flags=_winapi.PIPE_ACCESS_DUPLEX |_winapi.FILE_FLAG_OVERLAPPED\n if first:\n flags |=_winapi.FILE_FLAG_FIRST_PIPE_INSTANCE\n h=_winapi.CreateNamedPipe(\n self._address,flags,\n _winapi.PIPE_TYPE_MESSAGE |_winapi.PIPE_READMODE_MESSAGE |\n _winapi.PIPE_WAIT,\n _winapi.PIPE_UNLIMITED_INSTANCES,\n windows_utils.BUFSIZE,windows_utils.BUFSIZE,\n _winapi.NMPWAIT_WAIT_FOREVER,_winapi.NULL)\n pipe=windows_utils.PipeHandle(h)\n self._free_instances.add(pipe)\n return pipe\n \n def closed(self):\n return (self._address is None )\n \n def close(self):\n if self._accept_pipe_future is not None :\n 
self._accept_pipe_future.cancel()\n self._accept_pipe_future=None\n \n if self._address is not None :\n for pipe in self._free_instances:\n pipe.close()\n self._pipe=None\n self._address=None\n self._free_instances.clear()\n \n __del__=close\n \n \nclass _WindowsSelectorEventLoop(selector_events.BaseSelectorEventLoop):\n ''\n \n def _socketpair(self):\n return windows_utils.socketpair()\n \n \nclass ProactorEventLoop(proactor_events.BaseProactorEventLoop):\n ''\n \n def __init__(self,proactor=None ):\n if proactor is None :\n proactor=IocpProactor()\n super().__init__(proactor)\n \n def _socketpair(self):\n return windows_utils.socketpair()\n \n @coroutine\n def create_pipe_connection(self,protocol_factory,address):\n f=self._proactor.connect_pipe(address)\n pipe=yield from f\n protocol=protocol_factory()\n trans=self._make_duplex_pipe_transport(pipe,protocol,\n extra={'addr':address})\n return trans,protocol\n \n @coroutine\n def start_serving_pipe(self,protocol_factory,address):\n server=PipeServer(address)\n \n def loop_accept_pipe(f=None ):\n pipe=None\n try :\n if f:\n pipe=f.result()\n server._free_instances.discard(pipe)\n \n if server.closed():\n \n \n pipe.close()\n return\n \n protocol=protocol_factory()\n self._make_duplex_pipe_transport(\n pipe,protocol,extra={'addr':address})\n \n pipe=server._get_unconnected_pipe()\n if pipe is None :\n return\n \n f=self._proactor.accept_pipe(pipe)\n except OSError as exc:\n if pipe and pipe.fileno()!=-1:\n self.call_exception_handler({\n 'message':'Pipe accept failed',\n 'exception':exc,\n 'pipe':pipe,\n })\n pipe.close()\n elif self._debug:\n logger.warning(\"Accept pipe failed on pipe %r\",\n pipe,exc_info=True )\n except futures.CancelledError:\n if pipe:\n pipe.close()\n else :\n server._accept_pipe_future=f\n f.add_done_callback(loop_accept_pipe)\n \n self.call_soon(loop_accept_pipe)\n return [server]\n \n @coroutine\n def _make_subprocess_transport(self,protocol,args,shell,\n stdin,stdout,stderr,bufsize,\n extra=None ,**kwargs):\n waiter=futures.Future(loop=self)\n transp=_WindowsSubprocessTransport(self,protocol,args,shell,\n stdin,stdout,stderr,bufsize,\n waiter=waiter,extra=extra,\n **kwargs)\n try :\n yield from waiter\n except Exception as exc:\n \n \n err=exc\n else :\n err=None\n \n if err is not None :\n transp.close()\n yield from transp._wait()\n raise err\n \n return transp\n \n \nclass IocpProactor:\n ''\n \n def __init__(self,concurrency=0xffffffff):\n self._loop=None\n self._results=[]\n self._iocp=_overlapped.CreateIoCompletionPort(\n _overlapped.INVALID_HANDLE_VALUE,NULL,0,concurrency)\n self._cache={}\n self._registered=weakref.WeakSet()\n self._unregistered=[]\n self._stopped_serving=weakref.WeakSet()\n \n def __repr__(self):\n return ('<%s overlapped#=%s result#=%s>'\n %(self.__class__.__name__,len(self._cache),\n len(self._results)))\n \n def set_loop(self,loop):\n self._loop=loop\n \n def select(self,timeout=None ):\n if not self._results:\n self._poll(timeout)\n tmp=self._results\n self._results=[]\n return tmp\n \n def _result(self,value):\n fut=futures.Future(loop=self._loop)\n fut.set_result(value)\n return fut\n \n def recv(self,conn,nbytes,flags=0):\n self._register_with_iocp(conn)\n ov=_overlapped.Overlapped(NULL)\n try :\n if isinstance(conn,socket.socket):\n ov.WSARecv(conn.fileno(),nbytes,flags)\n else :\n ov.ReadFile(conn.fileno(),nbytes)\n except BrokenPipeError:\n return self._result(b'')\n \n def finish_recv(trans,key,ov):\n try :\n return ov.getresult()\n except OSError as exc:\n if exc.winerror 
==_overlapped.ERROR_NETNAME_DELETED:\n raise ConnectionResetError(*exc.args)\n else :\n raise\n \n return self._register(ov,conn,finish_recv)\n \n def send(self,conn,buf,flags=0):\n self._register_with_iocp(conn)\n ov=_overlapped.Overlapped(NULL)\n if isinstance(conn,socket.socket):\n ov.WSASend(conn.fileno(),buf,flags)\n else :\n ov.WriteFile(conn.fileno(),buf)\n \n def finish_send(trans,key,ov):\n try :\n return ov.getresult()\n except OSError as exc:\n if exc.winerror ==_overlapped.ERROR_NETNAME_DELETED:\n raise ConnectionResetError(*exc.args)\n else :\n raise\n \n return self._register(ov,conn,finish_send)\n \n def accept(self,listener):\n self._register_with_iocp(listener)\n conn=self._get_accept_socket(listener.family)\n ov=_overlapped.Overlapped(NULL)\n ov.AcceptEx(listener.fileno(),conn.fileno())\n \n def finish_accept(trans,key,ov):\n ov.getresult()\n \n buf=struct.pack('@P',listener.fileno())\n conn.setsockopt(socket.SOL_SOCKET,\n _overlapped.SO_UPDATE_ACCEPT_CONTEXT,buf)\n conn.settimeout(listener.gettimeout())\n return conn,conn.getpeername()\n \n @coroutine\n def accept_coro(future,conn):\n \n try :\n yield from future\n except futures.CancelledError:\n conn.close()\n raise\n \n future=self._register(ov,listener,finish_accept)\n coro=accept_coro(future,conn)\n tasks.async(coro,loop=self._loop)\n return future\n \n def connect(self,conn,address):\n self._register_with_iocp(conn)\n \n try :\n _overlapped.BindLocal(conn.fileno(),conn.family)\n except OSError as e:\n if e.winerror !=errno.WSAEINVAL:\n raise\n \n if conn.getsockname()[1]==0:\n raise\n ov=_overlapped.Overlapped(NULL)\n ov.ConnectEx(conn.fileno(),address)\n \n def finish_connect(trans,key,ov):\n ov.getresult()\n \n conn.setsockopt(socket.SOL_SOCKET,\n _overlapped.SO_UPDATE_CONNECT_CONTEXT,0)\n return conn\n \n return self._register(ov,conn,finish_connect)\n \n def accept_pipe(self,pipe):\n self._register_with_iocp(pipe)\n ov=_overlapped.Overlapped(NULL)\n connected=ov.ConnectNamedPipe(pipe.fileno())\n \n if connected:\n \n \n \n return self._result(pipe)\n \n def finish_accept_pipe(trans,key,ov):\n ov.getresult()\n return pipe\n \n return self._register(ov,pipe,finish_accept_pipe)\n \n @coroutine\n def connect_pipe(self,address):\n delay=CONNECT_PIPE_INIT_DELAY\n while True :\n \n \n \n try :\n handle=_overlapped.ConnectPipe(address)\n break\n except OSError as exc:\n if exc.winerror !=_overlapped.ERROR_PIPE_BUSY:\n raise\n \n \n delay=min(delay *2,CONNECT_PIPE_MAX_DELAY)\n yield from tasks.sleep(delay,loop=self._loop)\n \n return windows_utils.PipeHandle(handle)\n \n def wait_for_handle(self,handle,timeout=None ):\n ''\n\n\n\n \n return self._wait_for_handle(handle,timeout,False )\n \n def _wait_cancel(self,event,done_callback):\n fut=self._wait_for_handle(event,None ,True )\n \n \n fut._done_callback=done_callback\n return fut\n \n def _wait_for_handle(self,handle,timeout,_is_cancel):\n if timeout is None :\n ms=_winapi.INFINITE\n else :\n \n \n ms=math.ceil(timeout *1e3)\n \n \n ov=_overlapped.Overlapped(NULL)\n wait_handle=_overlapped.RegisterWaitWithQueue(\n handle,self._iocp,ov.address,ms)\n if _is_cancel:\n f=_WaitCancelFuture(ov,handle,wait_handle,loop=self._loop)\n else :\n f=_WaitHandleFuture(ov,handle,wait_handle,self,\n loop=self._loop)\n if f._source_traceback:\n del f._source_traceback[-1]\n \n def finish_wait_for_handle(trans,key,ov):\n \n \n \n \n \n \n return f._poll()\n \n self._cache[ov.address]=(f,ov,0,finish_wait_for_handle)\n return f\n \n def _register_with_iocp(self,obj):\n \n \n if obj not in 
self._registered:\n self._registered.add(obj)\n _overlapped.CreateIoCompletionPort(obj.fileno(),self._iocp,0,0)\n \n \n \n \n def _register(self,ov,obj,callback):\n \n \n \n f=_OverlappedFuture(ov,loop=self._loop)\n if f._source_traceback:\n del f._source_traceback[-1]\n if not ov.pending:\n \n \n \n \n try :\n value=callback(None ,None ,ov)\n except OSError as e:\n f.set_exception(e)\n else :\n f.set_result(value)\n \n \n \n \n \n \n \n \n \n self._cache[ov.address]=(f,ov,obj,callback)\n return f\n \n def _unregister(self,ov):\n ''\n\n\n\n\n \n self._unregistered.append(ov)\n \n def _get_accept_socket(self,family):\n s=socket.socket(family)\n s.settimeout(0)\n return s\n \n def _poll(self,timeout=None ):\n if timeout is None :\n ms=INFINITE\n elif timeout <0:\n raise ValueError(\"negative timeout\")\n else :\n \n \n ms=math.ceil(timeout *1e3)\n if ms >=INFINITE:\n raise ValueError(\"timeout too big\")\n \n while True :\n status=_overlapped.GetQueuedCompletionStatus(self._iocp,ms)\n if status is None :\n break\n ms=0\n \n err,transferred,key,address=status\n try :\n f,ov,obj,callback=self._cache.pop(address)\n except KeyError:\n if self._loop.get_debug():\n self._loop.call_exception_handler({\n 'message':('GetQueuedCompletionStatus() returned an '\n 'unexpected event'),\n 'status':('err=%s transferred=%s key=%#x address=%#x'\n %(err,transferred,key,address)),\n })\n \n \n \n if key not in (0,_overlapped.INVALID_HANDLE_VALUE):\n _winapi.CloseHandle(key)\n continue\n \n if obj in self._stopped_serving:\n f.cancel()\n \n \n elif not f.done():\n try :\n value=callback(transferred,key,ov)\n except OSError as e:\n f.set_exception(e)\n self._results.append(f)\n else :\n f.set_result(value)\n self._results.append(f)\n \n \n for ov in self._unregistered:\n self._cache.pop(ov.address,None )\n self._unregistered.clear()\n \n def _stop_serving(self,obj):\n \n \n \n self._stopped_serving.add(obj)\n \n def close(self):\n \n for address,(fut,ov,obj,callback)in list(self._cache.items()):\n if fut.cancelled():\n \n pass\n elif isinstance(fut,_WaitCancelFuture):\n \n pass\n else :\n try :\n fut.cancel()\n except OSError as exc:\n if self._loop is not None :\n context={\n 'message':'Cancelling a future failed',\n 'exception':exc,\n 'future':fut,\n }\n if fut._source_traceback:\n context['source_traceback']=fut._source_traceback\n self._loop.call_exception_handler(context)\n \n while self._cache:\n if not self._poll(1):\n logger.debug('taking long time to close proactor')\n \n self._results=[]\n if self._iocp is not None :\n _winapi.CloseHandle(self._iocp)\n self._iocp=None\n \n def __del__(self):\n self.close()\n \n \nclass _WindowsSubprocessTransport(base_subprocess.BaseSubprocessTransport):\n\n def _start(self,args,shell,stdin,stdout,stderr,bufsize,**kwargs):\n self._proc=windows_utils.Popen(\n args,shell=shell,stdin=stdin,stdout=stdout,stderr=stderr,\n bufsize=bufsize,**kwargs)\n \n def callback(f):\n returncode=self._proc.poll()\n self._process_exited(returncode)\n \n f=self._loop._proactor.wait_for_handle(int(self._proc._handle))\n f.add_done_callback(callback)\n \n \nSelectorEventLoop=_WindowsSelectorEventLoop\n\n\nclass _WindowsDefaultEventLoopPolicy(events.BaseDefaultEventLoopPolicy):\n _loop_factory=SelectorEventLoop\n \n \nDefaultEventLoopPolicy=_WindowsDefaultEventLoopPolicy\n"], "asyncio.unix_events": [".py", "''\n\nimport errno\nimport os\nimport signal\nimport socket\nimport stat\nimport subprocess\nimport sys\nimport threading\nimport warnings\n\n\nfrom .import base_events\nfrom .import 
base_subprocess\nfrom .import constants\nfrom .import coroutines\nfrom .import events\nfrom .import futures\nfrom .import selector_events\nfrom .import selectors\nfrom .import transports\nfrom .coroutines import coroutine\nfrom .log import logger\n\n\n__all__=['SelectorEventLoop',\n'AbstractChildWatcher','SafeChildWatcher',\n'FastChildWatcher','DefaultEventLoopPolicy',\n]\n\nif sys.platform =='win32':\n raise ImportError('Signals are not really supported on Windows')\n \n \ndef _sighandler_noop(signum,frame):\n ''\n pass\n \n \nclass _UnixSelectorEventLoop(selector_events.BaseSelectorEventLoop):\n ''\n\n\n \n \n def __init__(self,selector=None ):\n super().__init__(selector)\n self._signal_handlers={}\n \n def _socketpair(self):\n return socket.socketpair()\n \n def close(self):\n super().close()\n for sig in list(self._signal_handlers):\n self.remove_signal_handler(sig)\n \n def _process_self_data(self,data):\n for signum in data:\n if not signum:\n \n continue\n self._handle_signal(signum)\n \n def add_signal_handler(self,sig,callback,*args):\n ''\n\n\n\n \n if (coroutines.iscoroutine(callback)\n or coroutines.iscoroutinefunction(callback)):\n raise TypeError(\"coroutines cannot be used \"\n \"with add_signal_handler()\")\n self._check_signal(sig)\n self._check_closed()\n try :\n \n \n \n \n signal.set_wakeup_fd(self._csock.fileno())\n except (ValueError,OSError)as exc:\n raise RuntimeError(str(exc))\n \n handle=events.Handle(callback,args,self)\n self._signal_handlers[sig]=handle\n \n try :\n \n \n \n signal.signal(sig,_sighandler_noop)\n \n \n signal.siginterrupt(sig,False )\n except OSError as exc:\n del self._signal_handlers[sig]\n if not self._signal_handlers:\n try :\n signal.set_wakeup_fd(-1)\n except (ValueError,OSError)as nexc:\n logger.info('set_wakeup_fd(-1) failed: %s',nexc)\n \n if exc.errno ==errno.EINVAL:\n raise RuntimeError('sig {} cannot be caught'.format(sig))\n else :\n raise\n \n def _handle_signal(self,sig):\n ''\n handle=self._signal_handlers.get(sig)\n if handle is None :\n return\n if handle._cancelled:\n self.remove_signal_handler(sig)\n else :\n self._add_callback_signalsafe(handle)\n \n def remove_signal_handler(self,sig):\n ''\n\n\n \n self._check_signal(sig)\n try :\n del self._signal_handlers[sig]\n except KeyError:\n return False\n \n if sig ==signal.SIGINT:\n handler=signal.default_int_handler\n else :\n handler=signal.SIG_DFL\n \n try :\n signal.signal(sig,handler)\n except OSError as exc:\n if exc.errno ==errno.EINVAL:\n raise RuntimeError('sig {} cannot be caught'.format(sig))\n else :\n raise\n \n if not self._signal_handlers:\n try :\n signal.set_wakeup_fd(-1)\n except (ValueError,OSError)as exc:\n logger.info('set_wakeup_fd(-1) failed: %s',exc)\n \n return True\n \n def _check_signal(self,sig):\n ''\n\n\n\n \n if not isinstance(sig,int):\n raise TypeError('sig must be an int, not {!r}'.format(sig))\n \n if not (1 <=sig '%' '.join(info)\n \n def _read_ready(self):\n try :\n data=os.read(self._fileno,self.max_size)\n except (BlockingIOError,InterruptedError):\n pass\n except OSError as exc:\n self._fatal_error(exc,'Fatal read error on pipe transport')\n else :\n if data:\n self._protocol.data_received(data)\n else :\n if self._loop.get_debug():\n logger.info(\"%r was closed by peer\",self)\n self._closing=True\n self._loop.remove_reader(self._fileno)\n self._loop.call_soon(self._protocol.eof_received)\n self._loop.call_soon(self._call_connection_lost,None )\n \n def pause_reading(self):\n self._loop.remove_reader(self._fileno)\n \n def 
resume_reading(self):\n self._loop.add_reader(self._fileno,self._read_ready)\n \n def close(self):\n if not self._closing:\n self._close(None )\n \n \n \n \n if sys.version_info >=(3,4):\n def __del__(self):\n if self._pipe is not None :\n warnings.warn(\"unclosed transport %r\"%self,ResourceWarning)\n self._pipe.close()\n \n def _fatal_error(self,exc,message='Fatal error on pipe transport'):\n \n if (isinstance(exc,OSError)and exc.errno ==errno.EIO):\n if self._loop.get_debug():\n logger.debug(\"%r: %s\",self,message,exc_info=True )\n else :\n self._loop.call_exception_handler({\n 'message':message,\n 'exception':exc,\n 'transport':self,\n 'protocol':self._protocol,\n })\n self._close(exc)\n \n def _close(self,exc):\n self._closing=True\n self._loop.remove_reader(self._fileno)\n self._loop.call_soon(self._call_connection_lost,exc)\n \n def _call_connection_lost(self,exc):\n try :\n self._protocol.connection_lost(exc)\n finally :\n self._pipe.close()\n self._pipe=None\n self._protocol=None\n self._loop=None\n \n \nclass _UnixWritePipeTransport(transports._FlowControlMixin,\ntransports.WriteTransport):\n\n def __init__(self,loop,pipe,protocol,waiter=None ,extra=None ):\n super().__init__(extra,loop)\n self._extra['pipe']=pipe\n self._pipe=pipe\n self._fileno=pipe.fileno()\n mode=os.fstat(self._fileno).st_mode\n is_socket=stat.S_ISSOCK(mode)\n if not (is_socket or\n stat.S_ISFIFO(mode)or\n stat.S_ISCHR(mode)):\n raise ValueError(\"Pipe transport is only for \"\n \"pipes, sockets and character devices\")\n _set_nonblocking(self._fileno)\n self._protocol=protocol\n self._buffer=[]\n self._conn_lost=0\n self._closing=False\n \n self._loop.call_soon(self._protocol.connection_made,self)\n \n \n \n \n if is_socket or not sys.platform.startswith(\"aix\"):\n \n self._loop.call_soon(self._loop.add_reader,\n self._fileno,self._read_ready)\n \n if waiter is not None :\n \n self._loop.call_soon(waiter._set_result_unless_cancelled,None )\n \n def __repr__(self):\n info=[self.__class__.__name__]\n if self._pipe is None :\n info.append('closed')\n elif self._closing:\n info.append('closing')\n info.append('fd=%s'%self._fileno)\n if self._pipe is not None :\n polling=selector_events._test_selector_event(\n self._loop._selector,\n self._fileno,selectors.EVENT_WRITE)\n if polling:\n info.append('polling')\n else :\n info.append('idle')\n \n bufsize=self.get_write_buffer_size()\n info.append('bufsize=%s'%bufsize)\n else :\n info.append('closed')\n return'<%s>'%' '.join(info)\n \n def get_write_buffer_size(self):\n return sum(len(data)for data in self._buffer)\n \n def _read_ready(self):\n \n if self._loop.get_debug():\n logger.info(\"%r was closed by peer\",self)\n if self._buffer:\n self._close(BrokenPipeError())\n else :\n self._close()\n \n def write(self,data):\n assert isinstance(data,(bytes,bytearray,memoryview)),repr(data)\n if isinstance(data,bytearray):\n data=memoryview(data)\n if not data:\n return\n \n if self._conn_lost or self._closing:\n if self._conn_lost >=constants.LOG_THRESHOLD_FOR_CONNLOST_WRITES:\n logger.warning('pipe closed by peer or '\n 'os.write(pipe, data) raised exception.')\n self._conn_lost +=1\n return\n \n if not self._buffer:\n \n try :\n n=os.write(self._fileno,data)\n except (BlockingIOError,InterruptedError):\n n=0\n except Exception as exc:\n self._conn_lost +=1\n self._fatal_error(exc,'Fatal write error on pipe transport')\n return\n if n ==len(data):\n return\n elif n >0:\n data=data[n:]\n self._loop.add_writer(self._fileno,self._write_ready)\n \n 
self._buffer.append(data)\n self._maybe_pause_protocol()\n \n def _write_ready(self):\n data=b''.join(self._buffer)\n assert data,'Data should not be empty'\n \n self._buffer.clear()\n try :\n n=os.write(self._fileno,data)\n except (BlockingIOError,InterruptedError):\n self._buffer.append(data)\n except Exception as exc:\n self._conn_lost +=1\n \n \n self._loop.remove_writer(self._fileno)\n self._fatal_error(exc,'Fatal write error on pipe transport')\n else :\n if n ==len(data):\n self._loop.remove_writer(self._fileno)\n self._maybe_resume_protocol()\n if not self._buffer and self._closing:\n self._loop.remove_reader(self._fileno)\n self._call_connection_lost(None )\n return\n elif n >0:\n data=data[n:]\n \n self._buffer.append(data)\n \n def can_write_eof(self):\n return True\n \n def write_eof(self):\n if self._closing:\n return\n assert self._pipe\n self._closing=True\n if not self._buffer:\n self._loop.remove_reader(self._fileno)\n self._loop.call_soon(self._call_connection_lost,None )\n \n def close(self):\n if self._pipe is not None and not self._closing:\n \n self.write_eof()\n \n \n \n \n if sys.version_info >=(3,4):\n def __del__(self):\n if self._pipe is not None :\n warnings.warn(\"unclosed transport %r\"%self,ResourceWarning)\n self._pipe.close()\n \n def abort(self):\n self._close(None )\n \n def _fatal_error(self,exc,message='Fatal error on pipe transport'):\n \n if isinstance(exc,(BrokenPipeError,ConnectionResetError)):\n if self._loop.get_debug():\n logger.debug(\"%r: %s\",self,message,exc_info=True )\n else :\n self._loop.call_exception_handler({\n 'message':message,\n 'exception':exc,\n 'transport':self,\n 'protocol':self._protocol,\n })\n self._close(exc)\n \n def _close(self,exc=None ):\n self._closing=True\n if self._buffer:\n self._loop.remove_writer(self._fileno)\n self._buffer.clear()\n self._loop.remove_reader(self._fileno)\n self._loop.call_soon(self._call_connection_lost,exc)\n \n def _call_connection_lost(self,exc):\n try :\n self._protocol.connection_lost(exc)\n finally :\n self._pipe.close()\n self._pipe=None\n self._protocol=None\n self._loop=None\n \n \nif hasattr(os,'set_inheritable'):\n\n _set_inheritable=os.set_inheritable\nelse :\n import fcntl\n \n def _set_inheritable(fd,inheritable):\n cloexec_flag=getattr(fcntl,'FD_CLOEXEC',1)\n \n old=fcntl.fcntl(fd,fcntl.F_GETFD)\n if not inheritable:\n fcntl.fcntl(fd,fcntl.F_SETFD,old |cloexec_flag)\n else :\n fcntl.fcntl(fd,fcntl.F_SETFD,old&~cloexec_flag)\n \n \nclass _UnixSubprocessTransport(base_subprocess.BaseSubprocessTransport):\n\n def _start(self,args,shell,stdin,stdout,stderr,bufsize,**kwargs):\n stdin_w=None\n if stdin ==subprocess.PIPE:\n \n \n \n \n \n stdin,stdin_w=self._loop._socketpair()\n \n \n \n \n \n _set_inheritable(stdin_w.fileno(),False )\n self._proc=subprocess.Popen(\n args,shell=shell,stdin=stdin,stdout=stdout,stderr=stderr,\n universal_newlines=False ,bufsize=bufsize,**kwargs)\n if stdin_w is not None :\n stdin.close()\n self._proc.stdin=open(stdin_w.detach(),'wb',buffering=bufsize)\n \n \nclass AbstractChildWatcher:\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n def add_child_handler(self,pid,callback,*args):\n ''\n\n\n\n\n\n\n \n raise NotImplementedError()\n \n def remove_child_handler(self,pid):\n ''\n\n\n \n \n raise NotImplementedError()\n \n def attach_loop(self,loop):\n ''\n\n\n\n\n\n \n raise NotImplementedError()\n \n def close(self):\n ''\n\n\n \n raise NotImplementedError()\n \n def __enter__(self):\n ''\n\n \n raise NotImplementedError()\n \n def __exit__(self,a,b,c):\n 
''\n raise NotImplementedError()\n \n \nclass BaseChildWatcher(AbstractChildWatcher):\n\n def __init__(self):\n self._loop=None\n \n def close(self):\n self.attach_loop(None )\n \n def _do_waitpid(self,expected_pid):\n raise NotImplementedError()\n \n def _do_waitpid_all(self):\n raise NotImplementedError()\n \n def attach_loop(self,loop):\n assert loop is None or isinstance(loop,events.AbstractEventLoop)\n \n if self._loop is not None :\n self._loop.remove_signal_handler(signal.SIGCHLD)\n \n self._loop=loop\n if loop is not None :\n loop.add_signal_handler(signal.SIGCHLD,self._sig_chld)\n \n \n \n self._do_waitpid_all()\n \n def _sig_chld(self):\n try :\n self._do_waitpid_all()\n except Exception as exc:\n \n \n \n self._loop.call_exception_handler({\n 'message':'Unknown exception in SIGCHLD handler',\n 'exception':exc,\n })\n \n def _compute_returncode(self,status):\n if os.WIFSIGNALED(status):\n \n return -os.WTERMSIG(status)\n elif os.WIFEXITED(status):\n \n return os.WEXITSTATUS(status)\n else :\n \n \n \n return status\n \n \nclass SafeChildWatcher(BaseChildWatcher):\n ''\n\n\n\n\n\n\n\n \n \n def __init__(self):\n super().__init__()\n self._callbacks={}\n \n def close(self):\n self._callbacks.clear()\n super().close()\n \n def __enter__(self):\n return self\n \n def __exit__(self,a,b,c):\n pass\n \n def add_child_handler(self,pid,callback,*args):\n self._callbacks[pid]=(callback,args)\n \n \n self._do_waitpid(pid)\n \n def remove_child_handler(self,pid):\n try :\n del self._callbacks[pid]\n return True\n except KeyError:\n return False\n \n def _do_waitpid_all(self):\n \n for pid in list(self._callbacks):\n self._do_waitpid(pid)\n \n def _do_waitpid(self,expected_pid):\n assert expected_pid >0\n \n try :\n pid,status=os.waitpid(expected_pid,os.WNOHANG)\n except ChildProcessError:\n \n \n pid=expected_pid\n returncode=255\n logger.warning(\n \"Unknown child process pid %d, will report returncode 255\",\n pid)\n else :\n if pid ==0:\n \n return\n \n returncode=self._compute_returncode(status)\n if self._loop.get_debug():\n logger.debug('process %s exited with returncode %s',\n expected_pid,returncode)\n \n try :\n callback,args=self._callbacks.pop(pid)\n except KeyError:\n \n \n if self._loop.get_debug():\n logger.warning(\"Child watcher got an unexpected pid: %r\",\n pid,exc_info=True )\n else :\n callback(pid,returncode,*args)\n \n \nclass FastChildWatcher(BaseChildWatcher):\n ''\n\n\n\n\n\n\n\n \n def __init__(self):\n super().__init__()\n self._callbacks={}\n self._lock=threading.Lock()\n self._zombies={}\n self._forks=0\n \n def close(self):\n self._callbacks.clear()\n self._zombies.clear()\n super().close()\n \n def __enter__(self):\n with self._lock:\n self._forks +=1\n \n return self\n \n def __exit__(self,a,b,c):\n with self._lock:\n self._forks -=1\n \n if self._forks or not self._zombies:\n return\n \n collateral_victims=str(self._zombies)\n self._zombies.clear()\n \n logger.warning(\n \"Caught subprocesses termination from unknown pids: %s\",\n collateral_victims)\n \n def add_child_handler(self,pid,callback,*args):\n assert self._forks,\"Must use the context manager\"\n with self._lock:\n try :\n returncode=self._zombies.pop(pid)\n except KeyError:\n \n self._callbacks[pid]=callback,args\n return\n \n \n callback(pid,returncode,*args)\n \n def remove_child_handler(self,pid):\n try :\n del self._callbacks[pid]\n return True\n except KeyError:\n return False\n \n def _do_waitpid_all(self):\n \n \n while True :\n try :\n pid,status=os.waitpid(-1,os.WNOHANG)\n except 
ChildProcessError:\n \n return\n else :\n if pid ==0:\n \n return\n \n returncode=self._compute_returncode(status)\n \n with self._lock:\n try :\n callback,args=self._callbacks.pop(pid)\n except KeyError:\n \n if self._forks:\n \n self._zombies[pid]=returncode\n if self._loop.get_debug():\n logger.debug('unknown process %s exited '\n 'with returncode %s',\n pid,returncode)\n continue\n callback=None\n else :\n if self._loop.get_debug():\n logger.debug('process %s exited with returncode %s',\n pid,returncode)\n \n if callback is None :\n logger.warning(\n \"Caught subprocess termination from unknown pid: \"\n \"%d -> %d\",pid,returncode)\n else :\n callback(pid,returncode,*args)\n \n \nclass _UnixDefaultEventLoopPolicy(events.BaseDefaultEventLoopPolicy):\n ''\n _loop_factory=_UnixSelectorEventLoop\n \n def __init__(self):\n super().__init__()\n self._watcher=None\n \n def _init_watcher(self):\n with events._lock:\n if self._watcher is None :\n self._watcher=SafeChildWatcher()\n if isinstance(threading.current_thread(),\n threading._MainThread):\n self._watcher.attach_loop(self._local._loop)\n \n def set_event_loop(self,loop):\n ''\n\n\n\n\n \n \n super().set_event_loop(loop)\n \n if self._watcher is not None and isinstance(threading.current_thread(),threading._MainThread):\n self._watcher.attach_loop(loop)\n \n def get_child_watcher(self):\n ''\n\n\n \n if self._watcher is None :\n self._init_watcher()\n \n return self._watcher\n \n def set_child_watcher(self,watcher):\n ''\n \n assert watcher is None or isinstance(watcher,AbstractChildWatcher)\n \n if self._watcher is not None :\n self._watcher.close()\n \n self._watcher=watcher\n \nSelectorEventLoop=_UnixSelectorEventLoop\nDefaultEventLoopPolicy=_UnixDefaultEventLoopPolicy\n"], "colorsys": [".py", "''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n__all__=[\"rgb_to_yiq\",\"yiq_to_rgb\",\"rgb_to_hls\",\"hls_to_rgb\",\n\"rgb_to_hsv\",\"hsv_to_rgb\"]\n\n\n\nONE_THIRD=1.0 /3.0\nONE_SIXTH=1.0 /6.0\nTWO_THIRD=2.0 /3.0\n\n\n\n\n\ndef rgb_to_yiq(r,g,b):\n y=0.30 *r+0.59 *g+0.11 *b\n i=0.60 *r -0.28 *g -0.32 *b\n q=0.21 *r -0.52 *g+0.31 *b\n return (y,i,q)\n \ndef yiq_to_rgb(y,i,q):\n r=y+0.948262 *i+0.624013 *q\n g=y -0.276066 *i -0.639810 *q\n b=y -1.105450 *i+1.729860 *q\n if r <0.0:\n r=0.0\n if g <0.0:\n g=0.0\n if b <0.0:\n b=0.0\n if r >1.0:\n r=1.0\n if g >1.0:\n g=1.0\n if b >1.0:\n b=1.0\n return (r,g,b)\n \n \n \n \n \n \n \ndef rgb_to_hls(r,g,b):\n maxc=max(r,g,b)\n minc=min(r,g,b)\n \n l=(minc+maxc)/2.0\n if minc ==maxc:\n return 0.0,l,0.0\n if l <=0.5:\n s=(maxc -minc)/(maxc+minc)\n else :\n s=(maxc -minc)/(2.0 -maxc -minc)\n rc=(maxc -r)/(maxc -minc)\n gc=(maxc -g)/(maxc -minc)\n bc=(maxc -b)/(maxc -minc)\n if r ==maxc:\n h=bc -gc\n elif g ==maxc:\n h=2.0+rc -bc\n else :\n h=4.0+gc -rc\n h=(h /6.0)%1.0\n return h,l,s\n \ndef hls_to_rgb(h,l,s):\n if s ==0.0:\n return l,l,l\n if l <=0.5:\n m2=l *(1.0+s)\n else :\n m2=l+s -(l *s)\n m1=2.0 *l -m2\n return (_v(m1,m2,h+ONE_THIRD),_v(m1,m2,h),_v(m1,m2,h -ONE_THIRD))\n \ndef _v(m1,m2,hue):\n hue=hue %1.0\n if hue max_size:\n self.rollover()\n \n def rollover(self):\n if self._rolled:return\n file=self._file\n newfile=self._file=TemporaryFile(**self._TemporaryFileArgs)\n del self._TemporaryFileArgs\n \n newfile.write(file.getvalue())\n newfile.seek(file.tell(),0)\n \n self._rolled=True\n \n \n \n \n \n \n \n def __enter__(self):\n if self._file.closed:\n raise ValueError(\"Cannot enter context with closed file\")\n return self\n \n def __exit__(self,exc,value,tb):\n self._file.close()\n \n \n 
def __iter__(self):\n return self._file.__iter__()\n \n def close(self):\n self._file.close()\n \n @property\n def closed(self):\n return self._file.closed\n \n @property\n def encoding(self):\n try :\n return self._file.encoding\n except AttributeError:\n if'b'in self._TemporaryFileArgs['mode']:\n raise\n return self._TemporaryFileArgs['encoding']\n \n def fileno(self):\n self.rollover()\n return self._file.fileno()\n \n def flush(self):\n self._file.flush()\n \n def isatty(self):\n return self._file.isatty()\n \n @property\n def mode(self):\n try :\n return self._file.mode\n except AttributeError:\n return self._TemporaryFileArgs['mode']\n \n @property\n def name(self):\n try :\n return self._file.name\n except AttributeError:\n return None\n \n @property\n def newlines(self):\n try :\n return self._file.newlines\n except AttributeError:\n if'b'in self._TemporaryFileArgs['mode']:\n raise\n return self._TemporaryFileArgs['newline']\n \n def read(self,*args):\n return self._file.read(*args)\n \n def readline(self,*args):\n return self._file.readline(*args)\n \n def readlines(self,*args):\n return self._file.readlines(*args)\n \n def seek(self,*args):\n self._file.seek(*args)\n \n @property\n def softspace(self):\n return self._file.softspace\n \n def tell(self):\n return self._file.tell()\n \n def truncate(self,size=None ):\n if size is None :\n self._file.truncate()\n else :\n if size >self._max_size:\n self.rollover()\n self._file.truncate(size)\n \n def write(self,s):\n file=self._file\n rv=file.write(s)\n self._check(file)\n return rv\n \n def writelines(self,iterable):\n file=self._file\n rv=file.writelines(iterable)\n self._check(file)\n return rv\n \n \nclass TemporaryDirectory(object):\n ''\n\n\n\n\n\n\n\n\n \n \n def __init__(self,suffix=\"\",prefix=template,dir=None ):\n self._closed=False\n self.name=None\n self.name=mkdtemp(suffix,prefix,dir)\n \n def __repr__(self):\n return\"<{} {!r}>\".format(self.__class__.__name__,self.name)\n \n def __enter__(self):\n return self.name\n \n def cleanup(self,_warn=False ):\n if self.name and not self._closed:\n try :\n self._rmtree(self.name)\n except (TypeError,AttributeError)as ex:\n \n \n \n if\"None\"not in str(ex):\n raise\n print(\"ERROR: {!r} while cleaning up {!r}\".format(ex,self,),\n file=_sys.stderr)\n return\n self._closed=True\n if _warn:\n self._warn(\"Implicitly cleaning up {!r}\".format(self),\n ResourceWarning)\n \n def __exit__(self,exc,value,tb):\n self.cleanup()\n \n def __del__(self):\n \n self.cleanup(_warn=True )\n \n \n \n \n \n _listdir=staticmethod(_os.listdir)\n _path_join=staticmethod(_os.path.join)\n _isdir=staticmethod(_os.path.isdir)\n _islink=staticmethod(_os.path.islink)\n _remove=staticmethod(_os.remove)\n _rmdir=staticmethod(_os.rmdir)\n _os_error=OSError\n _warn=_warnings.warn\n \n def _rmtree(self,path):\n \n \n for name in self._listdir(path):\n fullname=self._path_join(path,name)\n try :\n isdir=self._isdir(fullname)and not self._islink(fullname)\n except self._os_error:\n isdir=False\n if isdir:\n self._rmtree(fullname)\n else :\n try :\n self._remove(fullname)\n except self._os_error:\n pass\n try :\n self._rmdir(path)\n except self._os_error:\n pass\n"], "encodings.palmos": [".py", "''\n\n\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_table)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass 
IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False ):\n return codecs.charmap_encode(input,self.errors,encoding_table)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False ):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='palmos',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n \n \ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' '\n'!'\n'\"'\n'#'\n'$'\n'%'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\u20ac'\n'\\x81'\n'\\u201a'\n'\\u0192'\n'\\u201e'\n'\\u2026'\n'\\u2020'\n'\\u2021'\n'\\u02c6'\n'\\u2030'\n'\\u0160'\n'\\u2039'\n'\\u0152'\n'\\u2666'\n'\\u2663'\n'\\u2665'\n'\\u2660'\n'\\u2018'\n'\\u2019'\n'\\u201c'\n'\\u201d'\n'\\u2022'\n'\\u2013'\n'\\u2014'\n'\\u02dc'\n'\\u2122'\n'\\u0161'\n'\\x9b'\n'\\u0153'\n'\\x9d'\n'\\x9e'\n'\\u0178'\n'\\xa0'\n'\\xa1'\n'\\xa2'\n'\\xa3'\n'\\xa4'\n'\\xa5'\n'\\xa6'\n'\\xa7'\n'\\xa8'\n'\\xa9'\n'\\xaa'\n'\\xab'\n'\\xac'\n'\\xad'\n'\\xae'\n'\\xaf'\n'\\xb0'\n'\\xb1'\n'\\xb2'\n'\\xb3'\n'\\xb4'\n'\\xb5'\n'\\xb6'\n'\\xb7'\n'\\xb8'\n'\\xb9'\n'\\xba'\n'\\xbb'\n'\\xbc'\n'\\xbd'\n'\\xbe'\n'\\xbf'\n'\\xc0'\n'\\xc1'\n'\\xc2'\n'\\xc3'\n'\\xc4'\n'\\xc5'\n'\\xc6'\n'\\xc7'\n'\\xc8'\n'\\xc9'\n'\\xca'\n'\\xcb'\n'\\xcc'\n'\\xcd'\n'\\xce'\n'\\xcf'\n'\\xd0'\n'\\xd1'\n'\\xd2'\n'\\xd3'\n'\\xd4'\n'\\xd5'\n'\\xd6'\n'\\xd7'\n'\\xd8'\n'\\xd9'\n'\\xda'\n'\\xdb'\n'\\xdc'\n'\\xdd'\n'\\xde'\n'\\xdf'\n'\\xe0'\n'\\xe1'\n'\\xe2'\n'\\xe3'\n'\\xe4'\n'\\xe5'\n'\\xe6'\n'\\xe7'\n'\\xe8'\n'\\xe9'\n'\\xea'\n'\\xeb'\n'\\xec'\n'\\xed'\n'\\xee'\n'\\xef'\n'\\xf0'\n'\\xf1'\n'\\xf2'\n'\\xf3'\n'\\xf4'\n'\\xf5'\n'\\xf6'\n'\\xf7'\n'\\xf8'\n'\\xf9'\n'\\xfa'\n'\\xfb'\n'\\xfc'\n'\\xfd'\n'\\xfe'\n'\\xff'\n)\n\n\nencoding_table=codecs.charmap_build(decoding_table)\n"], "calendar": [".py", "''\n\n\n\n\n\n\nimport sys\nimport datetime\nimport locale as _locale\n\n__all__=[\"IllegalMonthError\",\"IllegalWeekdayError\",\"setfirstweekday\",\n\"firstweekday\",\"isleap\",\"leapdays\",\"weekday\",\"monthrange\",\n\"monthcalendar\",\"prmonth\",\"month\",\"prcal\",\"calendar\",\n\"timegm\",\"month_name\",\"month_abbr\",\"day_name\",\"day_abbr\"]\n\n\nerror=ValueError\n\n\nclass IllegalMonthError(ValueError):\n def __init__(self,month):\n self.month=month\n def __str__(self):\n return\"bad month number %r; must be 1-12\"%self.month\n \n \nclass IllegalWeekdayError(ValueError):\n def __init__(self,weekday):\n self.weekday=weekday\n def __str__(self):\n return\"bad weekday number %r; must be 0 (Monday) to 6 (Sunday)\"%self.weekday\n \n \n 
\nJanuary=1\nFebruary=2\n\n\nmdays=[0,31,28,31,30,31,30,31,31,30,31,30,31]\n\n\n\n\n\n\nclass _localized_month:\n\n _months=[datetime.date(2001,i+1,1).strftime for i in range(12)]\n _months.insert(0,lambda x:\"\")\n \n def __init__(self,format):\n self.format=format\n \n def __getitem__(self,i):\n funcs=self._months[i]\n if isinstance(i,slice):\n return [f(self.format)for f in funcs]\n else :\n return funcs(self.format)\n \n def __len__(self):\n return 13\n \n \nclass _localized_day:\n\n\n _days=[datetime.date(2001,1,i+1).strftime for i in range(7)]\n \n def __init__(self,format):\n self.format=format\n \n def __getitem__(self,i):\n funcs=self._days[i]\n if isinstance(i,slice):\n return [f(self.format)for f in funcs]\n else :\n return funcs(self.format)\n \n def __len__(self):\n return 7\n \n \n \nday_name=_localized_day('%A')\nday_abbr=_localized_day('%a')\n\n\nmonth_name=_localized_month('%B')\nmonth_abbr=_localized_month('%b')\n\n\n(MONDAY,TUESDAY,WEDNESDAY,THURSDAY,FRIDAY,SATURDAY,SUNDAY)=range(7)\n\n\ndef isleap(year):\n ''\n return year %4 ==0 and (year %100 !=0 or year %400 ==0)\n \n \ndef leapdays(y1,y2):\n ''\n \n y1 -=1\n y2 -=1\n return (y2 //4 -y1 //4)-(y2 //100 -y1 //100)+(y2 //400 -y1 //400)\n \n \ndef weekday(year,month,day):\n ''\n \n return datetime.date(year,month,day).weekday()\n \n \ndef monthrange(year,month):\n ''\n \n if not 1 <=month <=12:\n raise IllegalMonthError(month)\n day1=weekday(year,month,1)\n ndays=mdays[month]+(month ==February and isleap(year))\n return day1,ndays\n \n \nclass Calendar(object):\n ''\n\n\n \n \n def __init__(self,firstweekday=0):\n self.firstweekday=firstweekday\n \n def getfirstweekday(self):\n return self._firstweekday %7\n \n def setfirstweekday(self,firstweekday):\n self._firstweekday=firstweekday\n \n firstweekday=property(getfirstweekday,setfirstweekday)\n \n def iterweekdays(self):\n ''\n\n\n \n for i in range(self.firstweekday,self.firstweekday+7):\n yield i %7\n \n def itermonthdates(self,year,month):\n ''\n\n\n\n \n date=datetime.date(year,month,1)\n \n days=(date.weekday()-self.firstweekday)%7\n date -=datetime.timedelta(days=days)\n oneday=datetime.timedelta(days=1)\n while True :\n yield date\n try :\n date +=oneday\n except OverflowError:\n \n break\n if date.month !=month and date.weekday()==self.firstweekday:\n break\n \n def itermonthdays2(self,year,month):\n ''\n\n\n \n for date in self.itermonthdates(year,month):\n if date.month !=month:\n yield (0,date.weekday())\n else :\n yield (date.day,date.weekday())\n \n def itermonthdays(self,year,month):\n ''\n\n\n \n for date in self.itermonthdates(year,month):\n if date.month !=month:\n yield 0\n else :\n yield date.day\n \n def monthdatescalendar(self,year,month):\n ''\n\n\n \n dates=list(self.itermonthdates(year,month))\n return [dates[i:i+7]for i in range(0,len(dates),7)]\n \n def monthdays2calendar(self,year,month):\n ''\n\n\n\n\n \n days=list(self.itermonthdays2(year,month))\n return [days[i:i+7]for i in range(0,len(days),7)]\n \n def monthdayscalendar(self,year,month):\n ''\n\n\n \n days=list(self.itermonthdays(year,month))\n return [days[i:i+7]for i in range(0,len(days),7)]\n \n def yeardatescalendar(self,year,width=3):\n ''\n\n\n\n\n \n months=[\n self.monthdatescalendar(year,i)\n for i in range(January,January+12)\n ]\n return [months[i:i+width]for i in range(0,len(months),width)]\n \n def yeardays2calendar(self,year,width=3):\n ''\n\n\n\n\n \n months=[\n self.monthdays2calendar(year,i)\n for i in range(January,January+12)\n ]\n return [months[i:i+width]for i in 
range(0,len(months),width)]\n \n def yeardayscalendar(self,year,width=3):\n ''\n\n\n\n \n months=[\n self.monthdayscalendar(year,i)\n for i in range(January,January+12)\n ]\n return [months[i:i+width]for i in range(0,len(months),width)]\n \n \nclass TextCalendar(Calendar):\n ''\n\n\n \n \n def prweek(self,theweek,width):\n ''\n\n \n print(self.formatweek(theweek,width),end=' ')\n \n def formatday(self,day,weekday,width):\n ''\n\n \n if day ==0:\n s=''\n else :\n s='%2i'%day\n return s.center(width)\n \n def formatweek(self,theweek,width):\n ''\n\n \n return' '.join(self.formatday(d,wd,width)for (d,wd)in theweek)\n \n def formatweekday(self,day,width):\n ''\n\n \n if width >=9:\n names=day_name\n else :\n names=day_abbr\n return names[day][:width].center(width)\n \n def formatweekheader(self,width):\n ''\n\n \n return' '.join(self.formatweekday(i,width)for i in self.iterweekdays())\n \n def formatmonthname(self,theyear,themonth,width,withyear=True ):\n ''\n\n \n s=month_name[themonth]\n if withyear:\n s=\"%s %r\"%(s,theyear)\n return s.center(width)\n \n def prmonth(self,theyear,themonth,w=0,l=0):\n ''\n\n \n print(self.formatmonth(theyear,themonth,w,l),end=' ')\n \n def formatmonth(self,theyear,themonth,w=0,l=0):\n ''\n\n \n w=max(2,w)\n l=max(1,l)\n s=self.formatmonthname(theyear,themonth,7 *(w+1)-1)\n s=s.rstrip()\n s +='\\n'*l\n s +=self.formatweekheader(w).rstrip()\n s +='\\n'*l\n for week in self.monthdays2calendar(theyear,themonth):\n s +=self.formatweek(week,w).rstrip()\n s +='\\n'*l\n return s\n \n def formatyear(self,theyear,w=2,l=1,c=6,m=3):\n ''\n\n \n w=max(2,w)\n l=max(1,l)\n c=max(2,c)\n colwidth=(w+1)*7 -1\n v=[]\n a=v.append\n a(repr(theyear).center(colwidth *m+c *(m -1)).rstrip())\n a('\\n'*l)\n header=self.formatweekheader(w)\n for (i,row)in enumerate(self.yeardays2calendar(theyear,m)):\n \n months=range(m *i+1,min(m *(i+1)+1,13))\n a('\\n'*l)\n names=(self.formatmonthname(theyear,k,colwidth,False )\n for k in months)\n a(formatstring(names,colwidth,c).rstrip())\n a('\\n'*l)\n headers=(header for k in months)\n a(formatstring(headers,colwidth,c).rstrip())\n a('\\n'*l)\n \n height=max(len(cal)for cal in row)\n for j in range(height):\n weeks=[]\n for cal in row:\n if j >=len(cal):\n weeks.append('')\n else :\n weeks.append(self.formatweek(cal[j],w))\n a(formatstring(weeks,colwidth,c).rstrip())\n a('\\n'*l)\n return''.join(v)\n \n def pryear(self,theyear,w=0,l=0,c=6,m=3):\n ''\n print(self.formatyear(theyear,w,l,c,m))\n \n \nclass HTMLCalendar(Calendar):\n ''\n\n \n \n \n cssclasses=[\"mon\",\"tue\",\"wed\",\"thu\",\"fri\",\"sat\",\"sun\"]\n \n def formatday(self,day,weekday):\n ''\n\n \n if day ==0:\n return' '\n else :\n return'%d'%(self.cssclasses[weekday],day)\n \n def formatweek(self,theweek):\n ''\n\n \n s=''.join(self.formatday(d,wd)for (d,wd)in theweek)\n return'%s'%s\n \n def formatweekday(self,day):\n ''\n\n \n return'%s'%(self.cssclasses[day],day_abbr[day])\n \n def formatweekheader(self):\n ''\n\n \n s=''.join(self.formatweekday(i)for i in self.iterweekdays())\n return'%s'%s\n \n def formatmonthname(self,theyear,themonth,withyear=True ):\n ''\n\n \n if withyear:\n s='%s %s'%(month_name[themonth],theyear)\n else :\n s='%s'%month_name[themonth]\n return'%s'%s\n \n def formatmonth(self,theyear,themonth,withyear=True ):\n ''\n\n \n v=[]\n a=v.append\n a('')\n a('\\n')\n a(self.formatmonthname(theyear,themonth,withyear=withyear))\n a('\\n')\n a(self.formatweekheader())\n a('\\n')\n for week in self.monthdays2calendar(theyear,themonth):\n a(self.formatweek(week))\n 
a('\\n')\n a('
')\n a('\\n')\n return''.join(v)\n \n def formatyear(self,theyear,width=3):\n ''\n\n \n v=[]\n a=v.append\n width=max(width,1)\n a('')\n a('\\n')\n a(''%(width,theyear))\n for i in range(January,January+12,width):\n \n months=range(i,min(i+width,13))\n a('')\n for m in months:\n a('')\n a('')\n a('
%s
')\n a(self.formatmonth(theyear,m,withyear=False ))\n a('
')\n return''.join(v)\n \n def formatyearpage(self,theyear,width=3,css='calendar.css',encoding=None ):\n ''\n\n \n if encoding is None :\n encoding=sys.getdefaultencoding()\n v=[]\n a=v.append\n a('\\n'%encoding)\n a('\\n')\n a('\\n')\n a('\\n')\n a('\\n'%encoding)\n if css is not None :\n a('\\n'%css)\n a('Calendar for %d\\n'%theyear)\n a('\\n')\n a('\\n')\n a(self.formatyear(theyear,width))\n a('\\n')\n a('\\n')\n return''.join(v).encode(encoding,\"xmlcharrefreplace\")\n \n \nclass different_locale:\n def __init__(self,locale):\n self.locale=locale\n \n def __enter__(self):\n self.oldlocale=_locale.getlocale(_locale.LC_TIME)\n _locale.setlocale(_locale.LC_TIME,self.locale)\n \n def __exit__(self,*args):\n _locale.setlocale(_locale.LC_TIME,self.oldlocale)\n \n \nclass LocaleTextCalendar(TextCalendar):\n ''\n\n\n\n\n \n \n def __init__(self,firstweekday=0,locale=None ):\n TextCalendar.__init__(self,firstweekday)\n if locale is None :\n locale=_locale.getdefaultlocale()\n self.locale=locale\n \n def formatweekday(self,day,width):\n with different_locale(self.locale):\n if width >=9:\n names=day_name\n else :\n names=day_abbr\n name=names[day]\n return name[:width].center(width)\n \n def formatmonthname(self,theyear,themonth,width,withyear=True ):\n with different_locale(self.locale):\n s=month_name[themonth]\n if withyear:\n s=\"%s %r\"%(s,theyear)\n return s.center(width)\n \n \nclass LocaleHTMLCalendar(HTMLCalendar):\n ''\n\n\n\n\n \n def __init__(self,firstweekday=0,locale=None ):\n HTMLCalendar.__init__(self,firstweekday)\n if locale is None :\n locale=_locale.getdefaultlocale()\n self.locale=locale\n \n def formatweekday(self,day):\n with different_locale(self.locale):\n s=day_abbr[day]\n return'%s'%(self.cssclasses[day],s)\n \n def formatmonthname(self,theyear,themonth,withyear=True ):\n with different_locale(self.locale):\n s=month_name[themonth]\n if withyear:\n s='%s %s'%(s,theyear)\n return'%s'%s\n \n \n \nc=TextCalendar()\n\nfirstweekday=c.getfirstweekday\n\ndef setfirstweekday(firstweekday):\n if not MONDAY <=firstweekday <=SUNDAY:\n raise IllegalWeekdayError(firstweekday)\n c.firstweekday=firstweekday\n \nmonthcalendar=c.monthdayscalendar\nprweek=c.prweek\nweek=c.formatweek\nweekheader=c.formatweekheader\nprmonth=c.prmonth\nmonth=c.formatmonth\ncalendar=c.formatyear\nprcal=c.pryear\n\n\n\n_colwidth=7 *3 -1\n_spacing=6\n\n\ndef format(cols,colwidth=_colwidth,spacing=_spacing):\n ''\n print(formatstring(cols,colwidth,spacing))\n \n \ndef formatstring(cols,colwidth=_colwidth,spacing=_spacing):\n ''\n spacing *=' '\n return spacing.join(c.center(colwidth)for c in cols)\n \n \nEPOCH=1970\n_EPOCH_ORD=datetime.date(EPOCH,1,1).toordinal()\n\n\ndef timegm(tuple):\n ''\n year,month,day,hour,minute,second=tuple[:6]\n days=datetime.date(year,month,1).toordinal()-_EPOCH_ORD+day -1\n hours=days *24+hour\n minutes=hours *60+minute\n seconds=minutes *60+second\n return seconds\n \n \ndef main(args):\n import optparse\n parser=optparse.OptionParser(usage=\"usage: %prog [options] [year [month]]\")\n parser.add_option(\n \"-w\",\"--width\",\n dest=\"width\",type=\"int\",default=2,\n help=\"width of date column (default 2, text only)\"\n )\n parser.add_option(\n \"-l\",\"--lines\",\n dest=\"lines\",type=\"int\",default=1,\n help=\"number of lines for each week (default 1, text only)\"\n )\n parser.add_option(\n \"-s\",\"--spacing\",\n dest=\"spacing\",type=\"int\",default=6,\n help=\"spacing between months (default 6, text only)\"\n )\n parser.add_option(\n \"-m\",\"--months\",\n 
dest=\"months\",type=\"int\",default=3,\n help=\"months per row (default 3, text only)\"\n )\n parser.add_option(\n \"-c\",\"--css\",\n dest=\"css\",default=\"calendar.css\",\n help=\"CSS to use for page (html only)\"\n )\n parser.add_option(\n \"-L\",\"--locale\",\n dest=\"locale\",default=None ,\n help=\"locale to be used from month and weekday names\"\n )\n parser.add_option(\n \"-e\",\"--encoding\",\n dest=\"encoding\",default=None ,\n help=\"Encoding to use for output.\"\n )\n parser.add_option(\n \"-t\",\"--type\",\n dest=\"type\",default=\"text\",\n choices=(\"text\",\"html\"),\n help=\"output type (text or html)\"\n )\n \n (options,args)=parser.parse_args(args)\n \n if options.locale and not options.encoding:\n parser.error(\"if --locale is specified --encoding is required\")\n sys.exit(1)\n \n locale=options.locale,options.encoding\n \n if options.type ==\"html\":\n if options.locale:\n cal=LocaleHTMLCalendar(locale=locale)\n else :\n cal=HTMLCalendar()\n encoding=options.encoding\n if encoding is None :\n encoding=sys.getdefaultencoding()\n optdict=dict(encoding=encoding,css=options.css)\n write=sys.stdout.buffer.write\n if len(args)==1:\n write(cal.formatyearpage(datetime.date.today().year,**optdict))\n elif len(args)==2:\n write(cal.formatyearpage(int(args[1]),**optdict))\n else :\n parser.error(\"incorrect number of arguments\")\n sys.exit(1)\n else :\n if options.locale:\n cal=LocaleTextCalendar(locale=locale)\n else :\n cal=TextCalendar()\n optdict=dict(w=options.width,l=options.lines)\n if len(args)!=3:\n optdict[\"c\"]=options.spacing\n optdict[\"m\"]=options.months\n if len(args)==1:\n result=cal.formatyear(datetime.date.today().year,**optdict)\n elif len(args)==2:\n result=cal.formatyear(int(args[1]),**optdict)\n elif len(args)==3:\n result=cal.formatmonth(int(args[1]),int(args[2]),**optdict)\n else :\n parser.error(\"incorrect number of arguments\")\n sys.exit(1)\n write=sys.stdout.write\n if options.encoding:\n result=result.encode(options.encoding)\n write=sys.stdout.buffer.write\n write(result)\n \n \nif __name__ ==\"__main__\":\n main(sys.argv)\n"], "encodings.cp1251": [".py", "''\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_table)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False ):\n return codecs.charmap_encode(input,self.errors,encoding_table)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False ):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='cp1251',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n \n \ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' 
'\n'!'\n'\"'\n'#'\n'$'\n'%'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\u0402'\n'\\u0403'\n'\\u201a'\n'\\u0453'\n'\\u201e'\n'\\u2026'\n'\\u2020'\n'\\u2021'\n'\\u20ac'\n'\\u2030'\n'\\u0409'\n'\\u2039'\n'\\u040a'\n'\\u040c'\n'\\u040b'\n'\\u040f'\n'\\u0452'\n'\\u2018'\n'\\u2019'\n'\\u201c'\n'\\u201d'\n'\\u2022'\n'\\u2013'\n'\\u2014'\n'\\ufffe'\n'\\u2122'\n'\\u0459'\n'\\u203a'\n'\\u045a'\n'\\u045c'\n'\\u045b'\n'\\u045f'\n'\\xa0'\n'\\u040e'\n'\\u045e'\n'\\u0408'\n'\\xa4'\n'\\u0490'\n'\\xa6'\n'\\xa7'\n'\\u0401'\n'\\xa9'\n'\\u0404'\n'\\xab'\n'\\xac'\n'\\xad'\n'\\xae'\n'\\u0407'\n'\\xb0'\n'\\xb1'\n'\\u0406'\n'\\u0456'\n'\\u0491'\n'\\xb5'\n'\\xb6'\n'\\xb7'\n'\\u0451'\n'\\u2116'\n'\\u0454'\n'\\xbb'\n'\\u0458'\n'\\u0405'\n'\\u0455'\n'\\u0457'\n'\\u0410'\n'\\u0411'\n'\\u0412'\n'\\u0413'\n'\\u0414'\n'\\u0415'\n'\\u0416'\n'\\u0417'\n'\\u0418'\n'\\u0419'\n'\\u041a'\n'\\u041b'\n'\\u041c'\n'\\u041d'\n'\\u041e'\n'\\u041f'\n'\\u0420'\n'\\u0421'\n'\\u0422'\n'\\u0423'\n'\\u0424'\n'\\u0425'\n'\\u0426'\n'\\u0427'\n'\\u0428'\n'\\u0429'\n'\\u042a'\n'\\u042b'\n'\\u042c'\n'\\u042d'\n'\\u042e'\n'\\u042f'\n'\\u0430'\n'\\u0431'\n'\\u0432'\n'\\u0433'\n'\\u0434'\n'\\u0435'\n'\\u0436'\n'\\u0437'\n'\\u0438'\n'\\u0439'\n'\\u043a'\n'\\u043b'\n'\\u043c'\n'\\u043d'\n'\\u043e'\n'\\u043f'\n'\\u0440'\n'\\u0441'\n'\\u0442'\n'\\u0443'\n'\\u0444'\n'\\u0445'\n'\\u0446'\n'\\u0447'\n'\\u0448'\n'\\u0449'\n'\\u044a'\n'\\u044b'\n'\\u044c'\n'\\u044d'\n'\\u044e'\n'\\u044f'\n)\n\n\nencoding_table=codecs.charmap_build(decoding_table)\n"], "concurrent.futures.process": [".py", "\n\n\n\"\"\"Implements ProcessPoolExecutor.\n\nThe follow diagram and text describe the data-flow through the system:\n\n|======================= In-process =====================|== Out-of-process ==|\n\n+----------+ +----------+ +--------+ +-----------+ +---------+\n| | => | Work Ids | => | | => | Call Q | => | |\n| | +----------+ | | +-----------+ | |\n| | | ... | | | | ... | | |\n| | | 6 | | | | 5, call() | | |\n| | | 7 | | | | ... | | |\n| Process | | ... | | Local | +-----------+ | Process |\n| Pool | +----------+ | Worker | | #1..n |\n| Executor | | Thread | | |\n| | +----------- + | | +-----------+ | |\n| | <=> | Work Items | <=> | | <= | Result Q | <= | |\n| | +------------+ | | +-----------+ | |\n| | | 6: call() | | | | ... | | |\n| | | future | | | | 4, result | | |\n| | | ... | | | | 3, except | | |\n+----------+ +------------+ +--------+ +-----------+ +---------+\n\nExecutor.submit() called:\n- creates a uniquely numbered _WorkItem and adds it to the \"Work Items\" dict\n- adds the id of the _WorkItem to the \"Work Ids\" queue\n\nLocal worker thread:\n- reads work ids from the \"Work Ids\" queue and looks up the corresponding\n WorkItem from the \"Work Items\" dict: if the work item has been cancelled then\n it is simply removed from the dict, otherwise it is repackaged as a\n _CallItem and put in the \"Call Q\". New _CallItems are put in the \"Call Q\"\n until \"Call Q\" is full. 
NOTE: the size of the \"Call Q\" is kept small because\n calls placed in the \"Call Q\" can no longer be cancelled with Future.cancel().\n- reads _ResultItems from \"Result Q\", updates the future stored in the\n \"Work Items\" dict and deletes the dict entry\n\nProcess #1..n:\n- reads _CallItems from \"Call Q\", executes the calls, and puts the resulting\n _ResultItems in \"Result Q\"\n\"\"\"\n\n__author__='Brian Quinlan (brian@sweetapp.com)'\n\nimport atexit\nimport os\nfrom concurrent.futures import _base\nimport queue\nfrom queue import Full\nimport multiprocessing\nfrom multiprocessing import SimpleQueue\nfrom multiprocessing.connection import wait\nimport threading\nimport weakref\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n_threads_queues=weakref.WeakKeyDictionary()\n_shutdown=False\n\ndef _python_exit():\n global _shutdown\n _shutdown=True\n items=list(_threads_queues.items())\n for t,q in items:\n q.put(None )\n for t,q in items:\n t.join()\n \n \n \n \n \nEXTRA_QUEUED_CALLS=1\n\nclass _WorkItem(object):\n def __init__(self,future,fn,args,kwargs):\n self.future=future\n self.fn=fn\n self.args=args\n self.kwargs=kwargs\n \nclass _ResultItem(object):\n def __init__(self,work_id,exception=None ,result=None ):\n self.work_id=work_id\n self.exception=exception\n self.result=result\n \nclass _CallItem(object):\n def __init__(self,work_id,fn,args,kwargs):\n self.work_id=work_id\n self.fn=fn\n self.args=args\n self.kwargs=kwargs\n \ndef _process_worker(call_queue,result_queue):\n ''\n\n\n\n\n\n\n\n\n\n\n \n while True :\n call_item=call_queue.get(block=True )\n if call_item is None :\n \n result_queue.put(os.getpid())\n return\n try :\n r=call_item.fn(*call_item.args,**call_item.kwargs)\n except BaseException as e:\n result_queue.put(_ResultItem(call_item.work_id,\n exception=e))\n else :\n result_queue.put(_ResultItem(call_item.work_id,\n result=r))\n \ndef _add_call_item_to_queue(pending_work_items,\nwork_ids,\ncall_queue):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n \n while True :\n if call_queue.full():\n return\n try :\n work_id=work_ids.get(block=False )\n except queue.Empty:\n return\n else :\n work_item=pending_work_items[work_id]\n \n if work_item.future.set_running_or_notify_cancel():\n call_queue.put(_CallItem(work_id,\n work_item.fn,\n work_item.args,\n work_item.kwargs),\n block=True )\n else :\n del pending_work_items[work_id]\n continue\n \ndef _queue_management_worker(executor_reference,\nprocesses,\npending_work_items,\nwork_ids_queue,\ncall_queue,\nresult_queue):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n executor=None\n \n def shutting_down():\n return _shutdown or executor is None or executor._shutdown_thread\n \n def shutdown_worker():\n \n nb_children_alive=sum(p.is_alive()for p in processes.values())\n for i in range(0,nb_children_alive):\n call_queue.put_nowait(None )\n \n call_queue.close()\n \n \n for p in processes.values():\n p.join()\n \n reader=result_queue._reader\n \n while True :\n _add_call_item_to_queue(pending_work_items,\n work_ids_queue,\n call_queue)\n \n sentinels=[p.sentinel for p in processes.values()]\n assert sentinels\n ready=wait([reader]+sentinels)\n if reader in ready:\n result_item=reader.recv()\n else :\n \n executor=executor_reference()\n if executor is not None :\n executor._broken=True\n executor._shutdown_thread=True\n executor=None\n \n for work_id,work_item in pending_work_items.items():\n work_item.future.set_exception(\n BrokenProcessPool(\n \"A process in the process pool was \"\n \"terminated abruptly while the future was \"\n \"running or pending.\"\n 
))\n \n del work_item\n pending_work_items.clear()\n \n \n for p in processes.values():\n p.terminate()\n shutdown_worker()\n return\n if isinstance(result_item,int):\n \n \n assert shutting_down()\n p=processes.pop(result_item)\n p.join()\n if not processes:\n shutdown_worker()\n return\n elif result_item is not None :\n work_item=pending_work_items.pop(result_item.work_id,None )\n \n if work_item is not None :\n if result_item.exception:\n work_item.future.set_exception(result_item.exception)\n else :\n work_item.future.set_result(result_item.result)\n \n del work_item\n \n executor=executor_reference()\n \n \n \n \n if shutting_down():\n try :\n \n \n if not pending_work_items:\n shutdown_worker()\n return\n except Full:\n \n \n pass\n executor=None\n \n_system_limits_checked=False\n_system_limited=None\ndef _check_system_limits():\n global _system_limits_checked,_system_limited\n if _system_limits_checked:\n if _system_limited:\n raise NotImplementedError(_system_limited)\n _system_limits_checked=True\n try :\n nsems_max=os.sysconf(\"SC_SEM_NSEMS_MAX\")\n except (AttributeError,ValueError):\n \n return\n if nsems_max ==-1:\n \n \n return\n if nsems_max >=256:\n \n \n return\n _system_limited=\"system provides too few semaphores (%d available, 256 necessary)\"%nsems_max\n raise NotImplementedError(_system_limited)\n \n \nclass BrokenProcessPool(RuntimeError):\n ''\n\n\n \n \n \nclass ProcessPoolExecutor(_base.Executor):\n def __init__(self,max_workers=None ):\n ''\n\n\n\n\n\n \n _check_system_limits()\n \n if max_workers is None :\n self._max_workers=os.cpu_count()or 1\n else :\n self._max_workers=max_workers\n \n \n \n \n self._call_queue=multiprocessing.Queue(self._max_workers+\n EXTRA_QUEUED_CALLS)\n \n \n \n self._call_queue._ignore_epipe=True\n self._result_queue=SimpleQueue()\n self._work_ids=queue.Queue()\n self._queue_management_thread=None\n \n self._processes={}\n \n \n self._shutdown_thread=False\n self._shutdown_lock=threading.Lock()\n self._broken=False\n self._queue_count=0\n self._pending_work_items={}\n \n def _start_queue_management_thread(self):\n \n \n def weakref_cb(_,q=self._result_queue):\n q.put(None )\n if self._queue_management_thread is None :\n \n self._adjust_process_count()\n self._queue_management_thread=threading.Thread(\n target=_queue_management_worker,\n args=(weakref.ref(self,weakref_cb),\n self._processes,\n self._pending_work_items,\n self._work_ids,\n self._call_queue,\n self._result_queue))\n self._queue_management_thread.daemon=True\n self._queue_management_thread.start()\n _threads_queues[self._queue_management_thread]=self._result_queue\n \n def _adjust_process_count(self):\n for _ in range(len(self._processes),self._max_workers):\n p=multiprocessing.Process(\n target=_process_worker,\n args=(self._call_queue,\n self._result_queue))\n p.start()\n self._processes[p.pid]=p\n \n def submit(self,fn,*args,**kwargs):\n with self._shutdown_lock:\n if self._broken:\n raise BrokenProcessPool('A child process terminated '\n 'abruptly, the process pool is not usable anymore')\n if self._shutdown_thread:\n raise RuntimeError('cannot schedule new futures after shutdown')\n \n f=_base.Future()\n w=_WorkItem(f,fn,args,kwargs)\n \n self._pending_work_items[self._queue_count]=w\n self._work_ids.put(self._queue_count)\n self._queue_count +=1\n \n self._result_queue.put(None )\n \n self._start_queue_management_thread()\n return f\n submit.__doc__=_base.Executor.submit.__doc__\n \n def shutdown(self,wait=True ):\n with self._shutdown_lock:\n 
self._shutdown_thread=True\n if self._queue_management_thread:\n \n self._result_queue.put(None )\n if wait:\n self._queue_management_thread.join()\n \n \n self._queue_management_thread=None\n self._call_queue=None\n self._result_queue=None\n self._processes=None\n shutdown.__doc__=_base.Executor.shutdown.__doc__\n \natexit.register(_python_exit)\n"], "imp": [".py", "''\n\n\n\n\n\n\n\nfrom _imp import (lock_held,acquire_lock,release_lock,\nget_frozen_object,is_frozen_package,\ninit_builtin,init_frozen,is_builtin,is_frozen,\n_fix_co_filename)\ntry :\n from _imp import load_dynamic\nexcept ImportError:\n\n load_dynamic=None\n \n \nfrom importlib._bootstrap import new_module\nfrom importlib._bootstrap import cache_from_source,source_from_cache\n\n\nfrom importlib import _bootstrap\n\n\nimport importlib.machinery as machinery\nimport os\nimport sys\nimport tokenize\nimport warnings\n\n\n\nSEARCH_ERROR=0\nPY_SOURCE=1\nPY_COMPILED=2\nC_EXTENSION=3\nPY_RESOURCE=4\nPKG_DIRECTORY=5\nC_BUILTIN=6\nPY_FROZEN=7\nPY_CODERESOURCE=8\nIMP_HOOK=9\n\n\ndef get_magic():\n ''\n return _bootstrap._MAGIC_BYTES\n \n \ndef get_tag():\n ''\n return sys.implementation.cache_tag\n \n \ndef get_suffixes():\n warnings.warn('imp.get_suffixes() is deprecated; use the constants '\n 'defined on importlib.machinery instead',\n DeprecationWarning,2)\n extensions=[(s,'rb',C_EXTENSION)for s in machinery.EXTENSION_SUFFIXES]\n source=[(s,'U',PY_SOURCE)for s in machinery.SOURCE_SUFFIXES]\n bytecode=[(s,'rb',PY_COMPILED)for s in machinery.BYTECODE_SUFFIXES]\n \n return extensions+source+bytecode\n \n \nclass NullImporter:\n\n ''\n \n def __init__(self,path):\n if path =='':\n raise ImportError('empty pathname',path='')\n elif os.path.isdir(path):\n raise ImportError('existing directory',path=path)\n \n def find_module(self,fullname):\n ''\n return None\n \n \nclass _HackedGetData:\n\n ''\n \n \n def __init__(self,fullname,path,file=None ):\n super().__init__(fullname,path)\n self.file=file\n \n def get_data(self,path):\n ''\n if self.file and path ==self.path:\n if not self.file.closed:\n file=self.file\n else :\n self.file=file=open(self.path,'r')\n \n with file:\n \n \n \n \n \n return file.read()\n else :\n return super().get_data(path)\n \n \nclass _LoadSourceCompatibility(_HackedGetData,_bootstrap.SourceFileLoader):\n\n ''\n \n pass\n \ndef load_source(name,pathname,file=None ):\n msg=('imp.load_source() is deprecated; use '\n 'importlib.machinery.SourceFileLoader(name, pathname).load_module()'\n ' instead')\n warnings.warn(msg,DeprecationWarning,2)\n _LoadSourceCompatibility(name,pathname,file).load_module(name)\n module=sys.modules[name]\n \n \n module.__loader__=_bootstrap.SourceFileLoader(name,pathname)\n return module\n \n \nclass _LoadCompiledCompatibility(_HackedGetData,\n_bootstrap.SourcelessFileLoader):\n\n ''\n \n pass\n \ndef load_compiled(name,pathname,file=None ):\n msg=('imp.load_compiled() is deprecated; use '\n 'importlib.machinery.SourcelessFileLoader(name, pathname).'\n 'load_module() instead ')\n warnings.warn(msg,DeprecationWarning,2)\n _LoadCompiledCompatibility(name,pathname,file).load_module(name)\n module=sys.modules[name]\n \n \n module.__loader__=_bootstrap.SourcelessFileLoader(name,pathname)\n return module\n \n \ndef load_package(name,path):\n msg=('imp.load_package() is deprecated; use either '\n 'importlib.machinery.SourceFileLoader() or '\n 'importlib.machinery.SourcelessFileLoader() instead')\n warnings.warn(msg,DeprecationWarning,2)\n if os.path.isdir(path):\n 
extensions=(machinery.SOURCE_SUFFIXES[:]+\n machinery.BYTECODE_SUFFIXES[:])\n for extension in extensions:\n path=os.path.join(path,'__init__'+extension)\n if os.path.exists(path):\n break\n else :\n raise ValueError('{!r} is not a package'.format(path))\n return _bootstrap.SourceFileLoader(name,path).load_module(name)\n \n \ndef load_module(name,file,filename,details):\n ''\n\n\n\n\n\n \n suffix,mode,type_=details\n with warnings.catch_warnings():\n warnings.simplefilter('ignore')\n if mode and (not mode.startswith(('r','U'))or'+'in mode):\n raise ValueError('invalid file open mode {!r}'.format(mode))\n elif file is None and type_ in {PY_SOURCE,PY_COMPILED}:\n msg='file object required for import (type code {})'.format(type_)\n raise ValueError(msg)\n elif type_ ==PY_SOURCE:\n return load_source(name,filename,file)\n elif type_ ==PY_COMPILED:\n return load_compiled(name,filename,file)\n elif type_ ==C_EXTENSION and load_dynamic is not None :\n if file is None :\n with open(filename,'rb')as opened_file:\n return load_dynamic(name,filename,opened_file)\n else :\n return load_dynamic(name,filename,file)\n elif type_ ==PKG_DIRECTORY:\n return load_package(name,filename)\n elif type_ ==C_BUILTIN:\n return init_builtin(name)\n elif type_ ==PY_FROZEN:\n return init_frozen(name)\n else :\n msg=\"Don't know how to import {} (type code {})\".format(name,type_)\n raise ImportError(msg,name=name)\n \n \ndef find_module(name,path=None ):\n ''\n\n\n\n\n\n\n\n\n \n if not isinstance(name,str):\n raise TypeError(\"'name' must be a str, not {}\".format(type(name)))\n elif not isinstance(path,(type(None ),list)):\n \n raise RuntimeError(\"'list' must be None or a list, \"\n \"not {}\".format(type(name)))\n \n if path is None :\n if is_builtin(name):\n return None ,None ,('','',C_BUILTIN)\n elif is_frozen(name):\n return None ,None ,('','',PY_FROZEN)\n else :\n path=sys.path\n \n for entry in path:\n package_directory=os.path.join(entry,name)\n for suffix in ['.py',machinery.BYTECODE_SUFFIXES[0]]:\n package_file_name='__init__'+suffix\n file_path=os.path.join(package_directory,package_file_name)\n if os.path.isfile(file_path):\n return None ,package_directory,('','',PKG_DIRECTORY)\n with warnings.catch_warnings():\n warnings.simplefilter('ignore')\n for suffix,mode,type_ in get_suffixes():\n file_name=name+suffix\n file_path=os.path.join(entry,file_name)\n if os.path.isfile(file_path):\n break\n else :\n continue\n break\n else :\n raise ImportError(_bootstrap._ERR_MSG.format(name),name=name)\n \n encoding=None\n if mode =='U':\n with open(file_path,'rb')as file:\n encoding=tokenize.detect_encoding(file.readline)[0]\n file=open(file_path,mode,encoding=encoding)\n return file,file_path,(suffix,mode,type_)\n \n \n_RELOADING={}\n\ndef reload(module):\n ''\n\n\n\n \n if not module or type(module)!=type(sys):\n raise TypeError(\"reload() argument must be module\")\n name=module.__name__\n if name not in sys.modules:\n msg=\"module {} not in sys.modules\"\n raise ImportError(msg.format(name),name=name)\n if name in _RELOADING:\n return _RELOADING[name]\n _RELOADING[name]=module\n try :\n parent_name=name.rpartition('.')[0]\n if parent_name and parent_name not in sys.modules:\n msg=\"parent {!r} not in sys.modules\"\n raise ImportError(msg.format(parent_name),name=parent_name)\n module.__loader__.load_module(name)\n \n return sys.modules[module.__name__]\n finally :\n try :\n del _RELOADING[name]\n except KeyError:\n pass\n"], "multiprocessing.dummy": [".py", 
"\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n__all__=[\n'Process','current_process','active_children','freeze_support',\n'Lock','RLock','Semaphore','BoundedSemaphore','Condition',\n'Event','Barrier','Queue','Manager','Pipe','Pool','JoinableQueue'\n]\n\n\n\n\n\nimport threading\nimport sys\nimport weakref\n\n\n\nfrom multiprocessing.dummy.connection import Pipe\nfrom threading import Lock,RLock,Semaphore,BoundedSemaphore\nfrom threading import Event,Condition,Barrier\nfrom queue import Queue\n\n\n\n\n\nclass DummyProcess(threading.Thread):\n\n def __init__(self,group=None ,target=None ,name=None ,args=(),kwargs={}):\n threading.Thread.__init__(self,group,target,name,args,kwargs)\n self._pid=None\n self._children=weakref.WeakKeyDictionary()\n self._start_called=False\n self._parent=current_process()\n \n def start(self):\n assert self._parent is current_process()\n self._start_called=True\n if hasattr(self._parent,'_children'):\n self._parent._children[self]=None\n threading.Thread.start(self)\n \n @property\n def exitcode(self):\n if self._start_called and not self.is_alive():\n return 0\n else :\n return None\n \n \n \n \n \nProcess=DummyProcess\ncurrent_process=threading.current_thread\ncurrent_process()._children=weakref.WeakKeyDictionary()\n\ndef active_children():\n children=current_process()._children\n for p in list(children):\n if not p.is_alive():\n children.pop(p,None )\n return list(children)\n \ndef freeze_support():\n pass\n \n \n \n \n \nclass Namespace(object):\n def __init__(self,**kwds):\n self.__dict__.update(kwds)\n def __repr__(self):\n items=list(self.__dict__.items())\n temp=[]\n for name,value in items:\n if not name.startswith('_'):\n temp.append('%s=%r'%(name,value))\n temp.sort()\n return'Namespace(%s)'%str.join(', ',temp)\n \ndict=dict\nlist=list\n\n\n\n\n\nclass Value(object):\n def __init__(self,typecode,value,lock=True ):\n self._typecode=typecode\n self._value=value\n def _get(self):\n return self._value\n def _set(self,value):\n self._value=value\n value=property(_get,_set)\n def __repr__(self):\n return'<%r(%r, %r)>'%(type(self).__name__,self._typecode,self._value)\n \ndef Manager():\n return sys.modules[__name__]\n \ndef shutdown():\n pass\n \ndef Pool(processes=None ,initializer=None ,initargs=()):\n from multiprocessing.pool import ThreadPool\n return ThreadPool(processes,initializer,initargs)\n \nJoinableQueue=Queue\n", 1], "_codecs": [".py", "\ndef ascii_decode(*args,**kw):\n pass\n \ndef ascii_encode(*args,**kw):\n pass\n \ndef charbuffer_encode(*args,**kw):\n pass\n \ndef charmap_build(*args,**kw):\n pass\n \ndef charmap_decode(*args,**kw):\n pass\n \ndef charmap_encode(*args,**kw):\n pass\n \ndef decode(*args,**kw):\n ''\n\n\n\n\n\n \n pass\n \ndef encode(*args,**kw):\n ''\n\n\n\n\n\n \n obj=args[0]\n if len(args)==2:\n encoding=args[1]\n else :\n encoding='utf-8'\n if isinstance(obj,str):\n return obj.encode(encoding)\n \ndef escape_decode(*args,**kw):\n pass\n \ndef escape_encode(*args,**kw):\n pass\n \ndef latin_1_decode(*args,**kw):\n pass\n \ndef latin_1_encode(*args,**kw):\n pass\n \ndef lookup(encoding):\n ''\n\n \n \n if encoding in ('utf-8','utf_8'):\n from javascript import console\n console.log('encoding',encoding)\n import encodings.utf_8\n return encodings.utf_8.getregentry()\n \n LookupError(encoding)\n \ndef lookup_error(*args,**kw):\n ''\n\n \n pass\n \ndef mbcs_decode(*args,**kw):\n pass\n \ndef mbcs_encode(*args,**kw):\n pass\n \ndef raw_unicode_escape_decode(*args,**kw):\n pass\n \ndef 
raw_unicode_escape_encode(*args,**kw):\n pass\n \ndef readbuffer_encode(*args,**kw):\n pass\n \ndef register(*args,**kw):\n ''\n\n\n\n \n pass\n \ndef register_error(*args,**kw):\n ''\n\n\n\n\n \n pass\n \ndef unicode_escape_decode(*args,**kw):\n pass\n \ndef unicode_escape_encode(*args,**kw):\n pass\n \ndef unicode_internal_decode(*args,**kw):\n pass\n \ndef unicode_internal_encode(*args,**kw):\n pass\n \ndef utf_16_be_decode(*args,**kw):\n pass\n \ndef utf_16_be_encode(*args,**kw):\n pass\n \ndef utf_16_decode(*args,**kw):\n pass\n \ndef utf_16_encode(*args,**kw):\n pass\n \ndef utf_16_ex_decode(*args,**kw):\n pass\n \ndef utf_16_le_decode(*args,**kw):\n pass\n \ndef utf_16_le_encode(*args,**kw):\n pass\n \ndef utf_32_be_decode(*args,**kw):\n pass\n \ndef utf_32_be_encode(*args,**kw):\n pass\n \ndef utf_32_decode(*args,**kw):\n pass\n \ndef utf_32_encode(*args,**kw):\n pass\n \ndef utf_32_ex_decode(*args,**kw):\n pass\n \ndef utf_32_le_decode(*args,**kw):\n pass\n \ndef utf_32_le_encode(*args,**kw):\n pass\n \ndef utf_7_decode(*args,**kw):\n pass\n \ndef utf_7_encode(*args,**kw):\n pass\n \ndef utf_8_decode(*args,**kw):\n pass\n \ndef utf_8_encode(*args,**kw):\n input=args[0]\n if len(args)==2:\n errors=args[1]\n else :\n errors=kw.get('errors','strict')\n \n \n \n return (bytes([_f for _f in input],'utf-8'),len(input))\n"], "asyncio.base_subprocess": [".py", "import collections\nimport subprocess\nimport sys\nimport warnings\n\nfrom .import futures\nfrom .import protocols\nfrom .import transports\nfrom .coroutines import coroutine\nfrom .log import logger\n\n\nclass BaseSubprocessTransport(transports.SubprocessTransport):\n\n def __init__(self,loop,protocol,args,shell,\n stdin,stdout,stderr,bufsize,\n waiter=None ,extra=None ,**kwargs):\n super().__init__(extra)\n self._closed=False\n self._protocol=protocol\n self._loop=loop\n self._proc=None\n self._pid=None\n self._returncode=None\n self._exit_waiters=[]\n self._pending_calls=collections.deque()\n self._pipes={}\n self._finished=False\n \n if stdin ==subprocess.PIPE:\n self._pipes[0]=None\n if stdout ==subprocess.PIPE:\n self._pipes[1]=None\n if stderr ==subprocess.PIPE:\n self._pipes[2]=None\n \n \n self._start(args=args,shell=shell,stdin=stdin,stdout=stdout,\n stderr=stderr,bufsize=bufsize,**kwargs)\n self._pid=self._proc.pid\n self._extra['subprocess']=self._proc\n \n if self._loop.get_debug():\n if isinstance(args,(bytes,str)):\n program=args\n else :\n program=args[0]\n logger.debug('process %r created: pid %s',\n program,self._pid)\n \n self._loop.create_task(self._connect_pipes(waiter))\n \n def __repr__(self):\n info=[self.__class__.__name__]\n if self._closed:\n info.append('closed')\n info.append('pid=%s'%self._pid)\n if self._returncode is not None :\n info.append('returncode=%s'%self._returncode)\n \n stdin=self._pipes.get(0)\n if stdin is not None :\n info.append('stdin=%s'%stdin.pipe)\n \n stdout=self._pipes.get(1)\n stderr=self._pipes.get(2)\n if stdout is not None and stderr is stdout:\n info.append('stdout=stderr=%s'%stdout.pipe)\n else :\n if stdout is not None :\n info.append('stdout=%s'%stdout.pipe)\n if stderr is not None :\n info.append('stderr=%s'%stderr.pipe)\n \n return'<%s>'%' '.join(info)\n \n def _start(self,args,shell,stdin,stdout,stderr,bufsize,**kwargs):\n raise NotImplementedError\n \n def _make_write_subprocess_pipe_proto(self,fd):\n raise NotImplementedError\n \n def _make_read_subprocess_pipe_proto(self,fd):\n raise NotImplementedError\n \n def close(self):\n if self._closed:\n return\n 
self._closed=True\n \n for proto in self._pipes.values():\n if proto is None :\n continue\n proto.pipe.close()\n \n if self._proc is not None and self._returncode is None :\n if self._loop.get_debug():\n logger.warning('Close running child process: kill %r',self)\n \n try :\n self._proc.kill()\n except ProcessLookupError:\n pass\n \n \n \n \n \n \n if sys.version_info >=(3,4):\n def __del__(self):\n if not self._closed:\n warnings.warn(\"unclosed transport %r\"%self,ResourceWarning)\n self.close()\n \n def get_pid(self):\n return self._pid\n \n def get_returncode(self):\n return self._returncode\n \n def get_pipe_transport(self,fd):\n if fd in self._pipes:\n return self._pipes[fd].pipe\n else :\n return None\n \n def _check_proc(self):\n if self._proc is None :\n raise ProcessLookupError()\n \n def send_signal(self,signal):\n self._check_proc()\n self._proc.send_signal(signal)\n \n def terminate(self):\n self._check_proc()\n self._proc.terminate()\n \n def kill(self):\n self._check_proc()\n self._proc.kill()\n \n @coroutine\n def _connect_pipes(self,waiter):\n try :\n proc=self._proc\n loop=self._loop\n \n if proc.stdin is not None :\n _,pipe=yield from loop.connect_write_pipe(\n lambda :WriteSubprocessPipeProto(self,0),\n proc.stdin)\n self._pipes[0]=pipe\n \n if proc.stdout is not None :\n _,pipe=yield from loop.connect_read_pipe(\n lambda :ReadSubprocessPipeProto(self,1),\n proc.stdout)\n self._pipes[1]=pipe\n \n if proc.stderr is not None :\n _,pipe=yield from loop.connect_read_pipe(\n lambda :ReadSubprocessPipeProto(self,2),\n proc.stderr)\n self._pipes[2]=pipe\n \n assert self._pending_calls is not None\n \n loop.call_soon(self._protocol.connection_made,self)\n for callback,data in self._pending_calls:\n loop.call_soon(callback,*data)\n self._pending_calls=None\n except Exception as exc:\n if waiter is not None and not waiter.cancelled():\n waiter.set_exception(exc)\n else :\n if waiter is not None and not waiter.cancelled():\n waiter.set_result(None )\n \n def _call(self,cb,*data):\n if self._pending_calls is not None :\n self._pending_calls.append((cb,data))\n else :\n self._loop.call_soon(cb,*data)\n \n def _pipe_connection_lost(self,fd,exc):\n self._call(self._protocol.pipe_connection_lost,fd,exc)\n self._try_finish()\n \n def _pipe_data_received(self,fd,data):\n self._call(self._protocol.pipe_data_received,fd,data)\n \n def _process_exited(self,returncode):\n assert returncode is not None ,returncode\n assert self._returncode is None ,self._returncode\n if self._loop.get_debug():\n logger.info('%r exited with return code %r',\n self,returncode)\n self._returncode=returncode\n self._call(self._protocol.process_exited)\n self._try_finish()\n \n \n for waiter in self._exit_waiters:\n if not waiter.cancelled():\n waiter.set_result(returncode)\n self._exit_waiters=None\n \n def _wait(self):\n ''\n\n \n if self._returncode is not None :\n return self._returncode\n \n waiter=futures.Future(loop=self._loop)\n self._exit_waiters.append(waiter)\n return (yield from waiter)\n \n def _try_finish(self):\n assert not self._finished\n if self._returncode is None :\n return\n if all(p is not None and p.disconnected\n for p in self._pipes.values()):\n self._finished=True\n self._call(self._call_connection_lost,None )\n \n def _call_connection_lost(self,exc):\n try :\n self._protocol.connection_lost(exc)\n finally :\n self._loop=None\n self._proc=None\n self._protocol=None\n \n \nclass WriteSubprocessPipeProto(protocols.BaseProtocol):\n\n def __init__(self,proc,fd):\n self.proc=proc\n 
self.fd=fd\n self.pipe=None\n self.disconnected=False\n \n def connection_made(self,transport):\n self.pipe=transport\n \n def __repr__(self):\n return ('<%s fd=%s pipe=%r>'\n %(self.__class__.__name__,self.fd,self.pipe))\n \n def connection_lost(self,exc):\n self.disconnected=True\n self.proc._pipe_connection_lost(self.fd,exc)\n self.proc=None\n \n def pause_writing(self):\n self.proc._protocol.pause_writing()\n \n def resume_writing(self):\n self.proc._protocol.resume_writing()\n \n \nclass ReadSubprocessPipeProto(WriteSubprocessPipeProto,\nprotocols.Protocol):\n\n def data_received(self,data):\n self.proc._pipe_data_received(self.fd,data)\n"], "importlib.machinery": [".py", "''\n\nimport _imp\n\nfrom ._bootstrap import (SOURCE_SUFFIXES,DEBUG_BYTECODE_SUFFIXES,\nOPTIMIZED_BYTECODE_SUFFIXES,\nEXTENSION_SUFFIXES)\nfrom ._bootstrap import BuiltinImporter\nfrom ._bootstrap import FrozenImporter\nfrom ._bootstrap import WindowsRegistryFinder\nfrom ._bootstrap import PathFinder\nfrom ._bootstrap import FileFinder\nfrom ._bootstrap import SourceFileLoader\nfrom ._bootstrap import SourcelessFileLoader\nfrom ._bootstrap import ExtensionFileLoader\n\n\n\n\n\n"], "tokenize": [".py", "''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n__author__='Ka-Ping Yee '\n__credits__=('GvR, ESR, Tim Peters, Thomas Wouters, Fred Drake, '\n'Skip Montanaro, Raymond Hettinger, Trent Nelson, '\n'Michael Foord')\nimport builtins\nimport re\nimport sys\nfrom token import *\nfrom codecs import lookup,BOM_UTF8\nimport collections\nfrom io import TextIOWrapper\ncookie_re=re.compile(r'^[ \\t\\f]*#.*coding[:=][ \\t]*([-\\w.]+)',re.ASCII)\n\nimport token\n__all__=token.__all__+[\"COMMENT\",\"tokenize\",\"detect_encoding\",\n\"NL\",\"untokenize\",\"ENCODING\",\"TokenInfo\"]\ndel token\n\nCOMMENT=N_TOKENS\ntok_name[COMMENT]='COMMENT'\nNL=N_TOKENS+1\ntok_name[NL]='NL'\nENCODING=N_TOKENS+2\ntok_name[ENCODING]='ENCODING'\nN_TOKENS +=3\nEXACT_TOKEN_TYPES={\n'(':LPAR,\n')':RPAR,\n'[':LSQB,\n']':RSQB,\n':':COLON,\n',':COMMA,\n';':SEMI,\n'+':PLUS,\n'-':MINUS,\n'*':STAR,\n'/':SLASH,\n'|':VBAR,\n'&':AMPER,\n'<':LESS,\n'>':GREATER,\n'=':EQUAL,\n'.':DOT,\n'%':PERCENT,\n'{':LBRACE,\n'}':RBRACE,\n'==':EQEQUAL,\n'!=':NOTEQUAL,\n'<=':LESSEQUAL,\n'>=':GREATEREQUAL,\n'~':TILDE,\n'^':CIRCUMFLEX,\n'<<':LEFTSHIFT,\n'>>':RIGHTSHIFT,\n'**':DOUBLESTAR,\n'+=':PLUSEQUAL,\n'-=':MINEQUAL,\n'*=':STAREQUAL,\n'/=':SLASHEQUAL,\n'%=':PERCENTEQUAL,\n'&=':AMPEREQUAL,\n'|=':VBAREQUAL,\n'^=':CIRCUMFLEXEQUAL,\n'<<=':LEFTSHIFTEQUAL,\n'>>=':RIGHTSHIFTEQUAL,\n'**=':DOUBLESTAREQUAL,\n'//':DOUBLESLASH,\n'//=':DOUBLESLASHEQUAL,\n'@':AT\n}\n\nclass TokenInfo(collections.namedtuple('TokenInfo','type string start end line')):\n def __repr__(self):\n annotated_type='%d (%s)'%(self.type,tok_name[self.type])\n return ('TokenInfo(type=%s, string=%r, start=%r, end=%r, line=%r)'%\n self._replace(type=annotated_type))\n \n @property\n def exact_type(self):\n if self.type ==OP and self.string in EXACT_TOKEN_TYPES:\n return EXACT_TOKEN_TYPES[self.string]\n else :\n return self.type\n \ndef group(*choices):return'('+'|'.join(choices)+')'\ndef any(*choices):return group(*choices)+'*'\ndef maybe(*choices):return group(*choices)+'?'\n\n\n\nWhitespace=r'[ 
\\f\\t]*'\nComment=r'#[^\\r\\n]*'\nIgnore=Whitespace+any(r'\\\\\\r?\\n'+Whitespace)+maybe(Comment)\nName=r'\\w+'\n\nHexnumber=r'0[xX][0-9a-fA-F]+'\nBinnumber=r'0[bB][01]+'\nOctnumber=r'0[oO][0-7]+'\nDecnumber=r'(?:0+|[1-9][0-9]*)'\nIntnumber=group(Hexnumber,Binnumber,Octnumber,Decnumber)\nExponent=r'[eE][-+]?[0-9]+'\nPointfloat=group(r'[0-9]+\\.[0-9]*',r'\\.[0-9]+')+maybe(Exponent)\nExpfloat=r'[0-9]+'+Exponent\nFloatnumber=group(Pointfloat,Expfloat)\nImagnumber=group(r'[0-9]+[jJ]',Floatnumber+r'[jJ]')\nNumber=group(Imagnumber,Floatnumber,Intnumber)\n\nStringPrefix=r'(?:[bB][rR]?|[rR][bB]?|[uU])?'\n\n\nSingle=r\"[^'\\\\]*(?:\\\\.[^'\\\\]*)*'\"\n\nDouble=r'[^\"\\\\]*(?:\\\\.[^\"\\\\]*)*\"'\n\nSingle3=r\"[^'\\\\]*(?:(?:\\\\.|'(?!''))[^'\\\\]*)*'''\"\n\nDouble3=r'[^\"\\\\]*(?:(?:\\\\.|\"(?!\"\"))[^\"\\\\]*)*\"\"\"'\nTriple=group(StringPrefix+\"'''\",StringPrefix+'\"\"\"')\n\nString=group(StringPrefix+r\"'[^\\n'\\\\]*(?:\\\\.[^\\n'\\\\]*)*'\",\nStringPrefix+r'\"[^\\n\"\\\\]*(?:\\\\.[^\\n\"\\\\]*)*\"')\n\n\n\n\nOperator=group(r\"\\*\\*=?\",r\">>=?\",r\"<<=?\",r\"!=\",\nr\"//=?\",r\"->\",\nr\"[+\\-*/%&|^=<>]=?\",\nr\"~\")\n\nBracket='[][(){}]'\nSpecial=group(r'\\r?\\n',r'\\.\\.\\.',r'[:;.,@]')\nFunny=group(Operator,Bracket,Special)\n\nPlainToken=group(Number,Funny,String,Name)\nToken=Ignore+PlainToken\n\n\nContStr=group(StringPrefix+r\"'[^\\n'\\\\]*(?:\\\\.[^\\n'\\\\]*)*\"+\ngroup(\"'\",r'\\\\\\r?\\n'),\nStringPrefix+r'\"[^\\n\"\\\\]*(?:\\\\.[^\\n\"\\\\]*)*'+\ngroup('\"',r'\\\\\\r?\\n'))\nPseudoExtras=group(r'\\\\\\r?\\n|\\Z',Comment,Triple)\nPseudoToken=Whitespace+group(PseudoExtras,Number,Funny,ContStr,Name)\n\ndef _compile(expr):\n return re.compile(expr,re.UNICODE)\n \nendpats={\"'\":Single,'\"':Double,\n\"'''\":Single3,'\"\"\"':Double3,\n\"r'''\":Single3,'r\"\"\"':Double3,\n\"b'''\":Single3,'b\"\"\"':Double3,\n\"R'''\":Single3,'R\"\"\"':Double3,\n\"B'''\":Single3,'B\"\"\"':Double3,\n\"br'''\":Single3,'br\"\"\"':Double3,\n\"bR'''\":Single3,'bR\"\"\"':Double3,\n\"Br'''\":Single3,'Br\"\"\"':Double3,\n\"BR'''\":Single3,'BR\"\"\"':Double3,\n\"rb'''\":Single3,'rb\"\"\"':Double3,\n\"Rb'''\":Single3,'Rb\"\"\"':Double3,\n\"rB'''\":Single3,'rB\"\"\"':Double3,\n\"RB'''\":Single3,'RB\"\"\"':Double3,\n\"u'''\":Single3,'u\"\"\"':Double3,\n\"R'''\":Single3,'R\"\"\"':Double3,\n\"U'''\":Single3,'U\"\"\"':Double3,\n'r':None ,'R':None ,'b':None ,'B':None ,\n'u':None ,'U':None }\n\ntriple_quoted={}\nfor t in (\"'''\",'\"\"\"',\n\"r'''\",'r\"\"\"',\"R'''\",'R\"\"\"',\n\"b'''\",'b\"\"\"',\"B'''\",'B\"\"\"',\n\"br'''\",'br\"\"\"',\"Br'''\",'Br\"\"\"',\n\"bR'''\",'bR\"\"\"',\"BR'''\",'BR\"\"\"',\n\"rb'''\",'rb\"\"\"',\"rB'''\",'rB\"\"\"',\n\"Rb'''\",'Rb\"\"\"',\"RB'''\",'RB\"\"\"',\n\"u'''\",'u\"\"\"',\"U'''\",'U\"\"\"',\n):\n triple_quoted[t]=t\nsingle_quoted={}\nfor t in (\"'\",'\"',\n\"r'\",'r\"',\"R'\",'R\"',\n\"b'\",'b\"',\"B'\",'B\"',\n\"br'\",'br\"',\"Br'\",'Br\"',\n\"bR'\",'bR\"',\"BR'\",'BR\"',\n\"rb'\",'rb\"',\"rB'\",'rB\"',\n\"Rb'\",'Rb\"',\"RB'\",'RB\"',\n\"u'\",'u\"',\"U'\",'U\"',\n):\n single_quoted[t]=t\n \ntabsize=8\n\nclass TokenError(Exception):pass\n\nclass StopTokenizing(Exception):pass\n\n\nclass Untokenizer:\n\n def __init__(self):\n self.tokens=[]\n self.prev_row=1\n self.prev_col=0\n self.encoding=None\n \n def add_whitespace(self,start):\n row,col=start\n assert row <=self.prev_row\n col_offset=col -self.prev_col\n if col_offset:\n self.tokens.append(\" \"*col_offset)\n \n def untokenize(self,iterable):\n for t in iterable:\n if len(t)==2:\n self.compat(t,iterable)\n break\n 
tok_type,token,start,end,line=t\n if tok_type ==ENCODING:\n self.encoding=token\n continue\n self.add_whitespace(start)\n self.tokens.append(token)\n self.prev_row,self.prev_col=end\n if tok_type in (NEWLINE,NL):\n self.prev_row +=1\n self.prev_col=0\n return\"\".join(self.tokens)\n \n def compat(self,token,iterable):\n startline=False\n indents=[]\n toks_append=self.tokens.append\n toknum,tokval=token\n \n if toknum in (NAME,NUMBER):\n tokval +=' '\n if toknum in (NEWLINE,NL):\n startline=True\n prevstring=False\n for tok in iterable:\n toknum,tokval=tok[:2]\n if toknum ==ENCODING:\n self.encoding=tokval\n continue\n \n if toknum in (NAME,NUMBER):\n tokval +=' '\n \n \n if toknum ==STRING:\n if prevstring:\n tokval=' '+tokval\n prevstring=True\n else :\n prevstring=False\n \n if toknum ==INDENT:\n indents.append(tokval)\n continue\n elif toknum ==DEDENT:\n indents.pop()\n continue\n elif toknum in (NEWLINE,NL):\n startline=True\n elif startline and indents:\n toks_append(indents[-1])\n startline=False\n toks_append(tokval)\n \n \ndef untokenize(iterable):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n ut=Untokenizer()\n out=ut.untokenize(iterable)\n if ut.encoding is not None :\n out=out.encode(ut.encoding)\n return out\n \n \ndef _get_normal_name(orig_enc):\n ''\n \n enc=orig_enc[:12].lower().replace(\"_\",\"-\")\n if enc ==\"utf-8\"or enc.startswith(\"utf-8-\"):\n return\"utf-8\"\n if enc in (\"latin-1\",\"iso-8859-1\",\"iso-latin-1\")or enc.startswith((\"latin-1-\",\"iso-8859-1-\",\"iso-latin-1-\")):\n return\"iso-8859-1\"\n return orig_enc\n \ndef detect_encoding(readline):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n try :\n filename=readline.__self__.name\n except AttributeError:\n filename=None\n bom_found=False\n encoding=None\n default='utf-8'\n def read_or_stop():\n try :\n return readline()\n except StopIteration:\n return b''\n \n def find_cookie(line):\n try :\n \n \n \n line_string=line.decode('utf-8')\n except UnicodeDecodeError:\n msg=\"invalid or missing encoding declaration\"\n if filename is not None :\n msg='{} for {!r}'.format(msg,filename)\n raise SyntaxError(msg)\n \n match=cookie_re.match(line_string)\n if not match:\n return None\n encoding=_get_normal_name(match.group(1))\n try :\n codec=lookup(encoding)\n except LookupError:\n \n if filename is None :\n msg=\"unknown encoding: \"+encoding\n else :\n msg=\"unknown encoding for {!r}: {}\".format(filename,\n encoding)\n raise SyntaxError(msg)\n \n if bom_found:\n if encoding !='utf-8':\n \n if filename is None :\n msg='encoding problem: utf-8'\n else :\n msg='encoding problem for {!r}: utf-8'.format(filename)\n raise SyntaxError(msg)\n encoding +='-sig'\n return encoding\n \n first=read_or_stop()\n if first.startswith(BOM_UTF8):\n bom_found=True\n first=first[3:]\n default='utf-8-sig'\n if not first:\n return default,[]\n \n encoding=find_cookie(first)\n if encoding:\n return encoding,[first]\n \n second=read_or_stop()\n if not second:\n return default,[first]\n \n encoding=find_cookie(second)\n if encoding:\n return encoding,[first,second]\n \n return default,[first,second]\n \n \ndef open(filename):\n ''\n\n \n buffer=builtins.open(filename,'rb')\n encoding,lines=detect_encoding(buffer.readline)\n buffer.seek(0)\n text=TextIOWrapper(buffer,encoding,line_buffering=True )\n text.mode='r'\n return text\n \n \ndef tokenize(readline):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n \n from itertools import chain,repeat\n encoding,consumed=detect_encoding(readline)\n rl_gen=iter(readline,b\"\")\n empty=repeat(b\"\")\n return 
_tokenize(chain(consumed,rl_gen,empty).__next__,encoding)\n \n \ndef _tokenize(readline,encoding):\n lnum=parenlev=continued=0\n numchars='0123456789'\n contstr,needcont='',0\n contline=None\n indents=[0]\n \n if encoding is not None :\n if encoding ==\"utf-8-sig\":\n \n encoding=\"utf-8\"\n yield TokenInfo(ENCODING,encoding,(0,0),(0,0),'')\n while True :\n try :\n line=readline()\n except StopIteration:\n line=b''\n \n if encoding is not None :\n line=line.decode(encoding)\n lnum +=1\n pos,max=0,len(line)\n \n if contstr:\n if not line:\n raise TokenError(\"EOF in multi-line string\",strstart)\n endmatch=endprog.match(line)\n if endmatch:\n pos=end=endmatch.end(0)\n yield TokenInfo(STRING,contstr+line[:end],\n strstart,(lnum,end),contline+line)\n contstr,needcont='',0\n contline=None\n elif needcont and line[-2:]!='\\\\\\n'and line[-3:]!='\\\\\\r\\n':\n yield TokenInfo(ERRORTOKEN,contstr+line,\n strstart,(lnum,len(line)),contline)\n contstr=''\n contline=None\n continue\n else :\n contstr=contstr+line\n contline=contline+line\n continue\n \n elif parenlev ==0 and not continued:\n if not line:break\n column=0\n while pos indents[-1]:\n indents.append(column)\n yield TokenInfo(INDENT,line[:pos],(lnum,0),(lnum,pos),line)\n while column \",lnum,pos,line))\n indents=indents[:-1]\n yield TokenInfo(DEDENT,'',(lnum,pos),(lnum,pos),line)\n \n else :\n if not line:\n raise TokenError(\"EOF in multi-line statement\",(lnum,0))\n continued=0\n \n while pos 0 else NEWLINE,\n token,spos,epos,line)\n elif initial =='#':\n assert not token.endswith(\"\\n\")\n yield TokenInfo(COMMENT,token,spos,epos,line)\n elif token in triple_quoted:\n endprog=_compile(endpats[token])\n endmatch=endprog.match(line,pos)\n if endmatch:\n pos=endmatch.end(0)\n token=line[start:pos]\n yield TokenInfo(STRING,token,spos,(lnum,pos),line)\n else :\n strstart=(lnum,start)\n contstr=line[start:]\n contline=line\n break\n elif initial in single_quoted or token[:2]in single_quoted or token[:3]in single_quoted:\n if token[-1]=='\\n':\n strstart=(lnum,start)\n endprog=_compile(endpats[initial]or\n endpats[token[1]]or\n endpats[token[2]])\n contstr,needcont=line[start:],1\n contline=line\n break\n else :\n yield TokenInfo(STRING,token,spos,epos,line)\n elif initial.isidentifier():\n yield TokenInfo(NAME,token,spos,epos,line)\n elif initial =='\\\\':\n continued=1\n else :\n if initial in'([{':\n parenlev +=1\n elif initial in')]}':\n parenlev -=1\n yield TokenInfo(OP,token,spos,epos,line)\n else :\n yield TokenInfo(ERRORTOKEN,line[pos],\n (lnum,pos),(lnum,pos+1),line)\n pos +=1\n \n for indent in indents[1:]:\n yield TokenInfo(DEDENT,'',(lnum,0),(lnum,0),'')\n yield TokenInfo(ENDMARKER,'',(lnum,0),(lnum,0),'')\n \n \n \n \ndef generate_tokens(readline):\n return _tokenize(readline,None )\n \ndef main():\n import argparse\n \n \n def perror(message):\n print(message,file=sys.stderr)\n \n def error(message,filename=None ,location=None ):\n if location:\n args=(filename,)+location+(message,)\n perror(\"%s:%d:%d: error: %s\"%args)\n elif filename:\n perror(\"%s: error: %s\"%(filename,message))\n else :\n perror(\"error: %s\"%message)\n sys.exit(1)\n \n \n parser=argparse.ArgumentParser(prog='python -m tokenize')\n parser.add_argument(dest='filename',nargs='?',\n metavar='filename.py',\n help='the file to tokenize; defaults to stdin')\n parser.add_argument('-e','--exact',dest='exact',action='store_true',\n help='display token names using the exact type')\n args=parser.parse_args()\n \n try :\n \n if args.filename:\n filename=args.filename\n 
with builtins.open(filename,'rb')as f:\n tokens=list(tokenize(f.readline))\n else :\n filename=\"\"\n tokens=_tokenize(sys.stdin.readline,None )\n \n \n for token in tokens:\n token_type=token.type\n if args.exact:\n token_type=token.exact_type\n token_range=\"%d,%d-%d,%d:\"%(token.start+token.end)\n print(\"%-20s%-15s%-15r\"%\n (token_range,tok_name[token_type],token.string))\n except IndentationError as err:\n line,column=err.args[1][1:3]\n error(err.args[0],filename,(line,column))\n except TokenError as err:\n line,column=err.args[1]\n error(err.args[0],filename,(line,column))\n except SyntaxError as err:\n error(err,filename)\n except IOError as err:\n error(err)\n except KeyboardInterrupt:\n print(\"interrupted\\n\")\n except Exception as err:\n perror(\"unexpected error: %s\"%err)\n raise\n \nif __name__ ==\"__main__\":\n main()\n"], "browser.markdown": [".py", "\n\ntry :\n import _jsre as re\nexcept :\n import re\n \nimport random\nimport time\n\nletters='abcdefghijklmnopqrstuvwxyz'\nletters +=letters.upper()+'0123456789'\n\nclass URL:\n def __init__(self,src):\n elts=src.split(maxsplit=1)\n self.href=elts[0]\n self.alt=''\n if len(elts)==2:\n alt=elts[1]\n if alt[0]=='\"'and alt[-1]=='\"':self.alt=alt[1:-1]\n elif alt[0]==\"'\"and alt[-1]==\"'\":self.alt=alt[1:-1]\n elif alt[0]==\"(\"and alt[-1]==\")\":self.alt=alt[1:-1]\n \nclass CodeBlock:\n def __init__(self,line):\n self.lines=[line]\n if line.startswith(\"```\")and len(line)>3:\n self.info=line[3:]\n else :\n self.info=None\n \n def to_html(self):\n if self.lines[0].startswith(\"`\"):\n self.lines.pop(0)\n res=escape('\\n'.join(self.lines))\n res=unmark(res)\n _class=self.info or\"marked\"\n res='
%s
\\n'%(_class,res)\n return res,[]\n \nclass HtmlBlock:\n\n def __init__(self,src):\n self.src=src\n \n def to_html(self):\n return self.src\n \nclass Marked:\n def __init__(self,line=''):\n self.line=line\n self.children=[]\n \n def to_html(self):\n return apply_markdown(self.line)\n \n \nrefs={}\nref_pattern=r\"^\\[(.*)\\]:\\s+(.*)\"\n\ndef mark(src):\n\n global refs\n t0=time.time()\n refs={}\n \n \n \n \n \n \n \n \n src=src.replace('\\r\\n','\\n')\n \n \n src=re.sub(r'(.*?)\\n=+\\n','\\n# \\\\1\\n',src)\n src=re.sub(r'(.*?)\\n-+\\n','\\n## \\\\1\\n',src)\n \n lines=src.split('\\n')+['']\n \n i=bq=0\n ul=ol=0\n \n while i '):\n nb=1\n while nb ':\n nb +=1\n lines[i]=lines[i][nb:]\n if nb >bq:\n lines.insert(i,'
'*(nb -bq))\n i +=1\n bq=nb\n elif nb '*(bq -nb))\n i +=1\n bq=nb\n elif bq >0:\n lines.insert(i,'
'*bq)\n i +=1\n bq=0\n \n \n if lines[i].strip()and lines[i].lstrip()[0]in'-+*' and len(lines[i].lstrip())>1 and lines[i].lstrip()[1]==' ' and (i ==0 or ul or not lines[i -1].strip()):\n \n nb=1+len(lines[i])-len(lines[i].lstrip())\n lines[i]='
  • '+lines[i][nb:]\n if nb >ul:\n lines.insert(i,'
      '*(nb -ul))\n i +=1\n elif nb '*(ul -nb))\n i +=1\n ul=nb\n elif ul and not lines[i].strip():\n if i 1 and nline[1]==' ':\n pass\n else :\n lines.insert(i,'
    '*ul)\n i +=1\n ul=0\n \n \n mo=re.search(r'^(\\d+\\.)',lines[i])\n if mo:\n if not ol:\n lines.insert(i,'
      ')\n i +=1\n lines[i]='
    1. '+lines[i][len(mo.groups()[0]):]\n ol=1\n elif ol and not lines[i].strip()and i ')\n i +=1\n ol=0\n \n i +=1\n \n if ul:\n lines.append(''*ul)\n if ol:\n lines.append('
    '*ol)\n if bq:\n lines.append(''*bq)\n \n t1=time.time()\n \n sections=[]\n scripts=[]\n section=Marked()\n \n i=0\n while i '):\n scripts.append('\\n'.join(lines[i+1:j]))\n for k in range(i,j+1):\n lines[k]=''\n break\n j +=1\n i=j\n continue\n \n \n elif line.startswith('#'):\n level=1\n line=lines[i]\n while level ','>')\n czone=czone.replace('_','_')\n czone=czone.replace('*','*')\n return czone\n \ndef s_escape(mo):\n\n czone=mo.string[mo.start():mo.end()]\n return escape(czone)\n \ndef unmark(code_zone):\n\n code_zone=code_zone.replace('_','_')\n return code_zone\n \ndef s_unmark(mo):\n\n code_zone=mo.string[mo.start():mo.end()]\n code_zone=code_zone.replace('_','_')\n return code_zone\n \ndef apply_markdown(src):\n\n scripts=[]\n key=None\n \n t0=time.time()\n i=0\n while i -1 and src[start_a:end_a].find('\\n')==-1:\n link=src[start_a:end_a]\n rest=src[end_a+1:].lstrip()\n if rest and rest[0]=='(':\n j=0\n while True :\n end_href=rest.find(')',j)\n if end_href ==-1:\n break\n if rest[end_href -1]=='\\\\':\n j=end_href+1\n else :\n break\n if end_href >-1 and rest[:end_href].find('\\n')==-1:\n tag=''+link+''\n src=src[:start_a -1]+tag+rest[end_href+1:]\n i=start_a+len(tag)\n elif rest and rest[0]=='[':\n j=0\n while True :\n end_key=rest.find(']',j)\n if end_key ==-1:\n break\n if rest[end_key -1]=='\\\\':\n j=end_key+1\n else :\n break\n if end_key >-1 and rest[:end_key].find('\\n')==-1:\n if not key:\n key=link\n if key.lower()not in refs:\n raise KeyError('unknown reference %s'%key)\n url=refs[key.lower()]\n tag=''+link+''\n src=src[:start_a -1]+tag+rest[end_key+1:]\n i=start_a+len(tag)\n \n i +=1\n \n t1=time.time()\n \n \n \n \n \n rstr=''.join(random.choice(letters)for i in range(16))\n \n i=0\n state=None\n start=-1\n data=''\n tags=[]\n while i 'and state is None :\n tags.append(src[i:j+1])\n src=src[:i]+rstr+src[j+1:]\n i +=len(rstr)\n break\n elif state =='\"'or state ==\"'\":\n data +=src[j]\n elif src[j]=='\\n':\n \n \n src=src[:i]+'<'+src[i+1:]\n j=i+4\n break\n j +=1\n elif src[i]=='`'and i >0 and src[i -1]!='\\\\':\n \n j=i+1\n while j \\1'%(tag,tag),src)\n \n em_patterns=[('EM',r'\\*(.*?)\\*'),('I',r'\\_(.*?)\\_')]\n for tag,em_pattern in em_patterns:\n src=re.sub(em_pattern,r'<%s>\\1'%(tag,tag),src)\n \n \n code_pattern=r'\\`(.*?)\\`'\n src=re.sub(code_pattern,r'\\1',src)\n \n \n while True :\n pos=src.rfind(rstr)\n if pos ==-1:\n break\n repl=tags.pop()\n src=src[:pos]+repl+src[pos+len(rstr):]\n \n src='

    '+src+'

    '\n \n t3=time.time()\n \n \n return src,scripts\n"], "asyncio.http": [".py", "import asyncio\n\nfrom browser import ajax\n\n__all__=['HTTPException','HTTPRequest']\n\nclass HTTPException(Exception):\n ''\n\n \n def __init__(self,request):\n super(HTTPException,self).__init__()\n self.req=request\n \n \nclass HTTPRequest(asyncio.Future):\n ''\n\n \n METHOD_POST='POST'\n METHOD_GET='GET'\n \n def __init__(self,url,method='GET',data=None ,**kwargs):\n super(HTTPRequest,self).__init__(**kwargs)\n self._url=url\n self._req=ajax.ajax()\n self._req.bind(\"complete\",self._complete_handler)\n self._data=data\n self._method=method\n self._req.open(self._method,self._url,True )\n self._req.set_header('content-type','application/x-www-form-urlencoded')\n if self._data is None :\n self._req.send()\n else :\n self._req.send(self._data)\n \n def _complete_handler(self,req):\n if req.status ==200 or req.status ==0:\n self.set_result(req)\n else :\n self.set_exception(HTTPException(req))\n"], "webbrowser": [".py", "from browser import window\n\n__all__=[\"Error\",\"open\",\"open_new\",\"open_new_tab\"]\n\nclass Error(Exception):\n pass\n \n_target={0:'',1:'_blank',2:'_new'}\n\n\ndef open(url,new=0,autoraise=True ):\n ''\n\n\n \n \n \n if'://'in url:\n if url[:6]=='ftp://':\n print('entro')\n else :\n protocol=url.split('//:')[0]\n url=url.replace(protocol+'//:','//')\n else :\n url='//'+url\n print(url)\n if window.open(url,_target[new]):\n return True\n return False\n \ndef open_new(url):\n return open(url,1)\n \ndef open_new_tab(url):\n return open(url,2)\n \n \n"], "encodings.iso8859_4": [".py", "''\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_table)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False ):\n return codecs.charmap_encode(input,self.errors,encoding_table)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False ):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='iso8859-4',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n \n \ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' 
'\n'!'\n'\"'\n'#'\n'$'\n'%'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\x80'\n'\\x81'\n'\\x82'\n'\\x83'\n'\\x84'\n'\\x85'\n'\\x86'\n'\\x87'\n'\\x88'\n'\\x89'\n'\\x8a'\n'\\x8b'\n'\\x8c'\n'\\x8d'\n'\\x8e'\n'\\x8f'\n'\\x90'\n'\\x91'\n'\\x92'\n'\\x93'\n'\\x94'\n'\\x95'\n'\\x96'\n'\\x97'\n'\\x98'\n'\\x99'\n'\\x9a'\n'\\x9b'\n'\\x9c'\n'\\x9d'\n'\\x9e'\n'\\x9f'\n'\\xa0'\n'\\u0104'\n'\\u0138'\n'\\u0156'\n'\\xa4'\n'\\u0128'\n'\\u013b'\n'\\xa7'\n'\\xa8'\n'\\u0160'\n'\\u0112'\n'\\u0122'\n'\\u0166'\n'\\xad'\n'\\u017d'\n'\\xaf'\n'\\xb0'\n'\\u0105'\n'\\u02db'\n'\\u0157'\n'\\xb4'\n'\\u0129'\n'\\u013c'\n'\\u02c7'\n'\\xb8'\n'\\u0161'\n'\\u0113'\n'\\u0123'\n'\\u0167'\n'\\u014a'\n'\\u017e'\n'\\u014b'\n'\\u0100'\n'\\xc1'\n'\\xc2'\n'\\xc3'\n'\\xc4'\n'\\xc5'\n'\\xc6'\n'\\u012e'\n'\\u010c'\n'\\xc9'\n'\\u0118'\n'\\xcb'\n'\\u0116'\n'\\xcd'\n'\\xce'\n'\\u012a'\n'\\u0110'\n'\\u0145'\n'\\u014c'\n'\\u0136'\n'\\xd4'\n'\\xd5'\n'\\xd6'\n'\\xd7'\n'\\xd8'\n'\\u0172'\n'\\xda'\n'\\xdb'\n'\\xdc'\n'\\u0168'\n'\\u016a'\n'\\xdf'\n'\\u0101'\n'\\xe1'\n'\\xe2'\n'\\xe3'\n'\\xe4'\n'\\xe5'\n'\\xe6'\n'\\u012f'\n'\\u010d'\n'\\xe9'\n'\\u0119'\n'\\xeb'\n'\\u0117'\n'\\xed'\n'\\xee'\n'\\u012b'\n'\\u0111'\n'\\u0146'\n'\\u014d'\n'\\u0137'\n'\\xf4'\n'\\xf5'\n'\\xf6'\n'\\xf7'\n'\\xf8'\n'\\u0173'\n'\\xfa'\n'\\xfb'\n'\\xfc'\n'\\u0169'\n'\\u016b'\n'\\u02d9'\n)\n\n\nencoding_table=codecs.charmap_build(decoding_table)\n"], "crypto_js.rollups.sha512": [".js", "/*\nCryptoJS v3.1.2\ncode.google.com/p/crypto-js\n(c) 2009-2013 by Jeff Mott. 
All rights reserved.\ncode.google.com/p/crypto-js/wiki/License\n*/\nvar CryptoJS=CryptoJS||function(a,m){var r={},f=r.lib={},g=function(){},l=f.Base={extend:function(a){g.prototype=this;var b=new g;a&&b.mixIn(a);b.hasOwnProperty(\"init\")||(b.init=function(){b.$super.init.apply(this,arguments)});b.init.prototype=b;b.$super=this;return b},create:function(){var a=this.extend();a.init.apply(a,arguments);return a},init:function(){},mixIn:function(a){for(var b in a)a.hasOwnProperty(b)&&(this[b]=a[b]);a.hasOwnProperty(\"toString\")&&(this.toString=a.toString)},clone:function(){return this.init.prototype.extend(this)}},\np=f.WordArray=l.extend({init:function(a,b){a=this.words=a||[];this.sigBytes=b!=m?b:4*a.length},toString:function(a){return(a||q).stringify(this)},concat:function(a){var b=this.words,d=a.words,c=this.sigBytes;a=a.sigBytes;this.clamp();if(c%4)for(var j=0;j>>2]|=(d[j>>>2]>>>24-8*(j%4)&255)<<24-8*((c+j)%4);else if(65535>>2]=d[j>>>2];else b.push.apply(b,d);this.sigBytes+=a;return this},clamp:function(){var n=this.words,b=this.sigBytes;n[b>>>2]&=4294967295<<\n32-8*(b%4);n.length=a.ceil(b/4)},clone:function(){var a=l.clone.call(this);a.words=this.words.slice(0);return a},random:function(n){for(var b=[],d=0;d>>2]>>>24-8*(c%4)&255;d.push((j>>>4).toString(16));d.push((j&15).toString(16))}return d.join(\"\")},parse:function(a){for(var b=a.length,d=[],c=0;c>>3]|=parseInt(a.substr(c,\n2),16)<<24-4*(c%8);return new p.init(d,b/2)}},G=y.Latin1={stringify:function(a){var b=a.words;a=a.sigBytes;for(var d=[],c=0;c>>2]>>>24-8*(c%4)&255));return d.join(\"\")},parse:function(a){for(var b=a.length,d=[],c=0;c>>2]|=(a.charCodeAt(c)&255)<<24-8*(c%4);return new p.init(d,b)}},fa=y.Utf8={stringify:function(a){try{return decodeURIComponent(escape(G.stringify(a)))}catch(b){throw Error(\"Malformed UTF-8 data\");}},parse:function(a){return G.parse(unescape(encodeURIComponent(a)))}},\nh=f.BufferedBlockAlgorithm=l.extend({reset:function(){this._data=new p.init;this._nDataBytes=0},_append:function(a){\"string\"==typeof a&&(a=fa.parse(a));this._data.concat(a);this._nDataBytes+=a.sigBytes},_process:function(n){var b=this._data,d=b.words,c=b.sigBytes,j=this.blockSize,l=c/(4*j),l=n?a.ceil(l):a.max((l|0)-this._minBufferSize,0);n=l*j;c=a.min(4*n,c);if(n){for(var h=0;hq;q++)y[q]=a();f=f.SHA512=r.extend({_doReset:function(){this._hash=new l.init([new g.init(1779033703,4089235720),new g.init(3144134277,2227873595),new g.init(1013904242,4271175723),new g.init(2773480762,1595750129),new g.init(1359893119,2917565137),new g.init(2600822924,725511199),new g.init(528734635,4215389547),new g.init(1541459225,327033209)])},_doProcessBlock:function(a,f){for(var h=this._hash.words,\ng=h[0],n=h[1],b=h[2],d=h[3],c=h[4],j=h[5],l=h[6],h=h[7],q=g.high,m=g.low,r=n.high,N=n.low,Z=b.high,O=b.low,$=d.high,P=d.low,aa=c.high,Q=c.low,ba=j.high,R=j.low,ca=l.high,S=l.low,da=h.high,T=h.low,v=q,s=m,H=r,E=N,I=Z,F=O,W=$,J=P,w=aa,t=Q,U=ba,K=R,V=ca,L=S,X=da,M=T,x=0;80>x;x++){var B=y[x];if(16>x)var u=B.high=a[f+2*x]|0,e=B.low=a[f+2*x+1]|0;else{var u=y[x-15],e=u.high,z=u.low,u=(e>>>1|z<<31)^(e>>>8|z<<24)^e>>>7,z=(z>>>1|e<<31)^(z>>>8|e<<24)^(z>>>7|e<<25),D=y[x-2],e=D.high,k=D.low,D=(e>>>19|k<<13)^\n(e<<3|k>>>29)^e>>>6,k=(k>>>19|e<<13)^(k<<3|e>>>29)^(k>>>6|e<<26),e=y[x-7],Y=e.high,C=y[x-16],A=C.high,C=C.low,e=z+e.low,u=u+Y+(e>>>0>>0?1:0),e=e+k,u=u+D+(e>>>0>>0?1:0),e=e+C,u=u+A+(e>>>0>>0?1:0);B.high=u;B.low=e}var 
Y=w&U^~w&V,C=t&K^~t&L,B=v&H^v&I^H&I,ha=s&E^s&F^E&F,z=(v>>>28|s<<4)^(v<<30|s>>>2)^(v<<25|s>>>7),D=(s>>>28|v<<4)^(s<<30|v>>>2)^(s<<25|v>>>7),k=p[x],ia=k.high,ea=k.low,k=M+((t>>>14|w<<18)^(t>>>18|w<<14)^(t<<23|w>>>9)),A=X+((w>>>14|t<<18)^(w>>>18|t<<14)^(w<<23|t>>>9))+(k>>>0>>\n0?1:0),k=k+C,A=A+Y+(k>>>0>>0?1:0),k=k+ea,A=A+ia+(k>>>0>>0?1:0),k=k+e,A=A+u+(k>>>0>>0?1:0),e=D+ha,B=z+B+(e>>>0>>0?1:0),X=V,M=L,V=U,L=K,U=w,K=t,t=J+k|0,w=W+A+(t>>>0>>0?1:0)|0,W=I,J=F,I=H,F=E,H=v,E=s,s=k+e|0,v=A+B+(s>>>0>>0?1:0)|0}m=g.low=m+s;g.high=q+v+(m>>>0>>0?1:0);N=n.low=N+E;n.high=r+H+(N>>>0>>0?1:0);O=b.low=O+F;b.high=Z+I+(O>>>0>>0?1:0);P=d.low=P+J;d.high=$+W+(P>>>0>>0?1:0);Q=c.low=Q+t;c.high=aa+w+(Q>>>0>>0?1:0);R=j.low=R+K;j.high=ba+U+(R>>>0>>0?1:0);S=l.low=\nS+L;l.high=ca+V+(S>>>0>>0?1:0);T=h.low=T+M;h.high=da+X+(T>>>0>>0?1:0)},_doFinalize:function(){var a=this._data,f=a.words,h=8*this._nDataBytes,g=8*a.sigBytes;f[g>>>5]|=128<<24-g%32;f[(g+128>>>10<<5)+30]=Math.floor(h/4294967296);f[(g+128>>>10<<5)+31]=h;a.sigBytes=4*f.length;this._process();return this._hash.toX32()},clone:function(){var a=r.clone.call(this);a._hash=this._hash.clone();return a},blockSize:32});m.SHA512=r._createHelper(f);m.HmacSHA512=r._createHmacHelper(f)})();\n"], "reprlib": [".py", "''\n\n__all__=[\"Repr\",\"repr\",\"recursive_repr\"]\n\nimport builtins\nfrom itertools import islice\ntry :\n from _thread import get_ident\nexcept ImportError:\n from _dummy_thread import get_ident\n \ndef recursive_repr(fillvalue='...'):\n ''\n \n def decorating_function(user_function):\n repr_running=set()\n \n def wrapper(self):\n key=id(self),get_ident()\n if key in repr_running:\n return fillvalue\n repr_running.add(key)\n try :\n result=user_function(self)\n finally :\n repr_running.discard(key)\n return result\n \n \n wrapper.__module__=getattr(user_function,'__module__')\n wrapper.__doc__=getattr(user_function,'__doc__')\n wrapper.__name__=getattr(user_function,'__name__')\n wrapper.__annotations__=getattr(user_function,'__annotations__',{})\n return wrapper\n \n return decorating_function\n \nclass Repr:\n\n def __init__(self):\n self.maxlevel=6\n self.maxtuple=6\n self.maxlist=6\n self.maxarray=5\n self.maxdict=4\n self.maxset=6\n self.maxfrozenset=6\n self.maxdeque=6\n self.maxstring=30\n self.maxlong=40\n self.maxother=30\n \n def repr(self,x):\n return self.repr1(x,self.maxlevel)\n \n def repr1(self,x,level):\n typename=type(x).__name__\n if' 'in typename:\n parts=typename.split()\n typename='_'.join(parts)\n if hasattr(self,'repr_'+typename):\n return getattr(self,'repr_'+typename)(x,level)\n else :\n return self.repr_instance(x,level)\n \n def _repr_iterable(self,x,level,left,right,maxiter,trail=''):\n n=len(x)\n if level <=0 and n:\n s='...'\n else :\n newlevel=level -1\n repr1=self.repr1\n pieces=[repr1(elem,newlevel)for elem in islice(x,maxiter)]\n if n >maxiter:pieces.append('...')\n s=', '.join(pieces)\n if n ==1 and trail:right=trail+right\n return'%s%s%s'%(left,s,right)\n \n def repr_tuple(self,x,level):\n return self._repr_iterable(x,level,'(',')',self.maxtuple,',')\n \n def repr_list(self,x,level):\n return self._repr_iterable(x,level,'[',']',self.maxlist)\n \n def repr_array(self,x,level):\n header=\"array('%s', [\"%x.typecode\n return self._repr_iterable(x,level,header,'])',self.maxarray)\n \n def repr_set(self,x,level):\n x=_possibly_sorted(x)\n return self._repr_iterable(x,level,'set([','])',self.maxset)\n \n def repr_frozenset(self,x,level):\n x=_possibly_sorted(x)\n return self._repr_iterable(x,level,'frozenset([','])',\n 
self.maxfrozenset)\n \n def repr_deque(self,x,level):\n return self._repr_iterable(x,level,'deque([','])',self.maxdeque)\n \n def repr_dict(self,x,level):\n n=len(x)\n if n ==0:return'{}'\n if level <=0:return'{...}'\n newlevel=level -1\n repr1=self.repr1\n pieces=[]\n for key in islice(_possibly_sorted(x),self.maxdict):\n keyrepr=repr1(key,newlevel)\n valrepr=repr1(x[key],newlevel)\n pieces.append('%s: %s'%(keyrepr,valrepr))\n if n >self.maxdict:pieces.append('...')\n s=', '.join(pieces)\n return'{%s}'%(s,)\n \n def repr_str(self,x,level):\n s=builtins.repr(x[:self.maxstring])\n if len(s)>self.maxstring:\n i=max(0,(self.maxstring -3)//2)\n j=max(0,self.maxstring -3 -i)\n s=builtins.repr(x[:i]+x[len(x)-j:])\n s=s[:i]+'...'+s[len(s)-j:]\n return s\n \n def repr_int(self,x,level):\n s=builtins.repr(x)\n if len(s)>self.maxlong:\n i=max(0,(self.maxlong -3)//2)\n j=max(0,self.maxlong -3 -i)\n s=s[:i]+'...'+s[len(s)-j:]\n return s\n \n def repr_instance(self,x,level):\n try :\n s=builtins.repr(x)\n \n \n except Exception:\n return'<%s instance at %x>'%(x.__class__.__name__,id(x))\n if len(s)>self.maxother:\n i=max(0,(self.maxother -3)//2)\n j=max(0,self.maxother -3 -i)\n s=s[:i]+'...'+s[len(s)-j:]\n return s\n \n \ndef _possibly_sorted(x):\n\n\n\n try :\n return sorted(x)\n except Exception:\n return list(x)\n \naRepr=Repr()\nrepr=aRepr.repr\n"], "bdb": [".py", "''\n\nimport fnmatch\nimport sys\nimport os\nfrom inspect import CO_GENERATOR\n\n__all__=[\"BdbQuit\",\"Bdb\",\"Breakpoint\"]\n\nclass BdbQuit(Exception):\n ''\n \n \nclass Bdb:\n ''\n\n\n\n\n \n \n def __init__(self,skip=None ):\n self.skip=set(skip)if skip else None\n self.breaks={}\n self.fncache={}\n self.frame_returning=None\n \n def canonic(self,filename):\n if filename ==\"<\"+filename[1:-1]+\">\":\n return filename\n canonic=self.fncache.get(filename)\n if not canonic:\n canonic=os.path.abspath(filename)\n canonic=os.path.normcase(canonic)\n self.fncache[filename]=canonic\n return canonic\n \n def reset(self):\n import linecache\n linecache.checkcache()\n self.botframe=None\n self._set_stopinfo(None ,None )\n \n def trace_dispatch(self,frame,event,arg):\n if self.quitting:\n return\n if event =='line':\n return self.dispatch_line(frame)\n if event =='call':\n return self.dispatch_call(frame,arg)\n if event =='return':\n return self.dispatch_return(frame,arg)\n if event =='exception':\n return self.dispatch_exception(frame,arg)\n if event =='c_call':\n return self.trace_dispatch\n if event =='c_exception':\n return self.trace_dispatch\n if event =='c_return':\n return self.trace_dispatch\n print('bdb.Bdb.dispatch: unknown debugging event:',repr(event))\n return self.trace_dispatch\n \n def dispatch_line(self,frame):\n if self.stop_here(frame)or self.break_here(frame):\n self.user_line(frame)\n if self.quitting:raise BdbQuit\n return self.trace_dispatch\n \n def dispatch_call(self,frame,arg):\n \n if self.botframe is None :\n \n self.botframe=frame.f_back\n return self.trace_dispatch\n if not (self.stop_here(frame)or self.break_anywhere(frame)):\n \n return\n \n if self.stopframe and frame.f_code.co_flags&CO_GENERATOR:\n return self.trace_dispatch\n self.user_call(frame,arg)\n if self.quitting:raise BdbQuit\n return self.trace_dispatch\n \n def dispatch_return(self,frame,arg):\n if self.stop_here(frame)or frame ==self.returnframe:\n \n if self.stopframe and frame.f_code.co_flags&CO_GENERATOR:\n return self.trace_dispatch\n try :\n self.frame_returning=frame\n self.user_return(frame,arg)\n finally :\n self.frame_returning=None\n if 
self.quitting:raise BdbQuit\n \n if self.stopframe is frame and self.stoplineno !=-1:\n self._set_stopinfo(None ,None )\n return self.trace_dispatch\n \n def dispatch_exception(self,frame,arg):\n if self.stop_here(frame):\n \n \n \n if not (frame.f_code.co_flags&CO_GENERATOR\n and arg[0]is StopIteration and arg[2]is None ):\n self.user_exception(frame,arg)\n if self.quitting:raise BdbQuit\n \n \n \n \n elif (self.stopframe and frame is not self.stopframe\n and self.stopframe.f_code.co_flags&CO_GENERATOR\n and arg[0]in (StopIteration,GeneratorExit)):\n self.user_exception(frame,arg)\n if self.quitting:raise BdbQuit\n \n return self.trace_dispatch\n \n \n \n \n \n def is_skipped_module(self,module_name):\n for pattern in self.skip:\n if fnmatch.fnmatch(module_name,pattern):\n return True\n return False\n \n def stop_here(self,frame):\n \n \n if self.skip and self.is_skipped_module(frame.f_globals.get('__name__')):\n return False\n if frame is self.stopframe:\n if self.stoplineno ==-1:\n return False\n return frame.f_lineno >=self.stoplineno\n if not self.stopframe:\n return True\n return False\n \n def break_here(self,frame):\n filename=self.canonic(frame.f_code.co_filename)\n if filename not in self.breaks:\n return False\n lineno=frame.f_lineno\n if lineno not in self.breaks[filename]:\n \n \n lineno=frame.f_code.co_firstlineno\n if lineno not in self.breaks[filename]:\n return False\n \n \n (bp,flag)=effective(filename,lineno,frame)\n if bp:\n self.currentbp=bp.number\n if (flag and bp.temporary):\n self.do_clear(str(bp.number))\n return True\n else :\n return False\n \n def do_clear(self,arg):\n raise NotImplementedError(\"subclass of bdb must implement do_clear()\")\n \n def break_anywhere(self,frame):\n return self.canonic(frame.f_code.co_filename)in self.breaks\n \n \n \n \n def user_call(self,frame,argument_list):\n ''\n \n pass\n \n def user_line(self,frame):\n ''\n pass\n \n def user_return(self,frame,return_value):\n ''\n pass\n \n def user_exception(self,frame,exc_info):\n ''\n \n pass\n \n def _set_stopinfo(self,stopframe,returnframe,stoplineno=0):\n self.stopframe=stopframe\n self.returnframe=returnframe\n self.quitting=False\n \n \n self.stoplineno=stoplineno\n \n \n \n \n def set_until(self,frame,lineno=None ):\n ''\n \n \n if lineno is None :\n lineno=frame.f_lineno+1\n self._set_stopinfo(frame,frame,lineno)\n \n def set_step(self):\n ''\n \n \n \n \n if self.frame_returning:\n caller_frame=self.frame_returning.f_back\n if caller_frame and not caller_frame.f_trace:\n caller_frame.f_trace=self.trace_dispatch\n self._set_stopinfo(None ,None )\n \n def set_next(self,frame):\n ''\n self._set_stopinfo(frame,None )\n \n def set_return(self,frame):\n ''\n if frame.f_code.co_flags&CO_GENERATOR:\n self._set_stopinfo(frame,None ,-1)\n else :\n self._set_stopinfo(frame.f_back,frame)\n \n def set_trace(self,frame=None ):\n ''\n\n\n \n if frame is None :\n frame=sys._getframe().f_back\n self.reset()\n while frame:\n frame.f_trace=self.trace_dispatch\n self.botframe=frame\n frame=frame.f_back\n self.set_step()\n sys.settrace(self.trace_dispatch)\n \n def set_continue(self):\n \n self._set_stopinfo(self.botframe,None ,-1)\n if not self.breaks:\n \n sys.settrace(None )\n frame=sys._getframe().f_back\n while frame and frame is not self.botframe:\n del frame.f_trace\n frame=frame.f_back\n \n def set_quit(self):\n self.stopframe=self.botframe\n self.returnframe=None\n self.quitting=True\n sys.settrace(None )\n \n \n \n \n \n \n \n \n def set_break(self,filename,lineno,temporary=False 
,cond=None ,\n funcname=None ):\n filename=self.canonic(filename)\n import linecache\n line=linecache.getline(filename,lineno)\n if not line:\n return'Line %s:%d does not exist'%(filename,lineno)\n list=self.breaks.setdefault(filename,[])\n if lineno not in list:\n list.append(lineno)\n bp=Breakpoint(filename,lineno,temporary,cond,funcname)\n \n def _prune_breaks(self,filename,lineno):\n if (filename,lineno)not in Breakpoint.bplist:\n self.breaks[filename].remove(lineno)\n if not self.breaks[filename]:\n del self.breaks[filename]\n \n def clear_break(self,filename,lineno):\n filename=self.canonic(filename)\n if filename not in self.breaks:\n return'There are no breakpoints in %s'%filename\n if lineno not in self.breaks[filename]:\n return'There is no breakpoint at %s:%d'%(filename,lineno)\n \n \n for bp in Breakpoint.bplist[filename,lineno][:]:\n bp.deleteMe()\n self._prune_breaks(filename,lineno)\n \n def clear_bpbynumber(self,arg):\n try :\n bp=self.get_bpbynumber(arg)\n except ValueError as err:\n return str(err)\n bp.deleteMe()\n self._prune_breaks(bp.file,bp.line)\n \n def clear_all_file_breaks(self,filename):\n filename=self.canonic(filename)\n if filename not in self.breaks:\n return'There are no breakpoints in %s'%filename\n for line in self.breaks[filename]:\n blist=Breakpoint.bplist[filename,line]\n for bp in blist:\n bp.deleteMe()\n del self.breaks[filename]\n \n def clear_all_breaks(self):\n if not self.breaks:\n return'There are no breakpoints'\n for bp in Breakpoint.bpbynumber:\n if bp:\n bp.deleteMe()\n self.breaks={}\n \n def get_bpbynumber(self,arg):\n if not arg:\n raise ValueError('Breakpoint number expected')\n try :\n number=int(arg)\n except ValueError:\n raise ValueError('Non-numeric breakpoint number %s'%arg)\n try :\n bp=Breakpoint.bpbynumber[number]\n except IndexError:\n raise ValueError('Breakpoint number %d out of range'%number)\n if bp is None :\n raise ValueError('Breakpoint %d already deleted'%number)\n return bp\n \n def get_break(self,filename,lineno):\n filename=self.canonic(filename)\n return filename in self.breaks and lineno in self.breaks[filename]\n \n def get_breaks(self,filename,lineno):\n filename=self.canonic(filename)\n return filename in self.breaks and lineno in self.breaks[filename]and Breakpoint.bplist[filename,lineno]or []\n \n def get_file_breaks(self,filename):\n filename=self.canonic(filename)\n if filename in self.breaks:\n return self.breaks[filename]\n else :\n return []\n \n def get_all_breaks(self):\n return self.breaks\n \n \n \n \n def get_stack(self,f,t):\n stack=[]\n if t and t.tb_frame is f:\n t=t.tb_next\n while f is not None :\n stack.append((f,f.f_lineno))\n if f is self.botframe:\n break\n f=f.f_back\n stack.reverse()\n i=max(0,len(stack)-1)\n while t is not None :\n stack.append((t.tb_frame,t.tb_lineno))\n t=t.tb_next\n if f is None :\n i=max(0,len(stack)-1)\n return stack,i\n \n def format_stack_entry(self,frame_lineno,lprefix=': '):\n import linecache,reprlib\n frame,lineno=frame_lineno\n filename=self.canonic(frame.f_code.co_filename)\n s='%s(%r)'%(filename,lineno)\n if frame.f_code.co_name:\n s +=frame.f_code.co_name\n else :\n s +=\"\"\n if'__args__'in frame.f_locals:\n args=frame.f_locals['__args__']\n else :\n args=None\n if args:\n s +=reprlib.repr(args)\n else :\n s +='()'\n if'__return__'in frame.f_locals:\n rv=frame.f_locals['__return__']\n s +='->'\n s +=reprlib.repr(rv)\n line=linecache.getline(filename,lineno,frame.f_globals)\n if line:\n s +=lprefix+line.strip()\n return s\n \n \n \n \n \n def 
run(self,cmd,globals=None ,locals=None ):\n if globals is None :\n import __main__\n globals=__main__.__dict__\n if locals is None :\n locals=globals\n self.reset()\n if isinstance(cmd,str):\n cmd=compile(cmd,\"\",\"exec\")\n sys.settrace(self.trace_dispatch)\n try :\n exec(cmd,globals,locals)\n except BdbQuit:\n pass\n finally :\n self.quitting=True\n sys.settrace(None )\n \n def runeval(self,expr,globals=None ,locals=None ):\n if globals is None :\n import __main__\n globals=__main__.__dict__\n if locals is None :\n locals=globals\n self.reset()\n sys.settrace(self.trace_dispatch)\n try :\n return eval(expr,globals,locals)\n except BdbQuit:\n pass\n finally :\n self.quitting=True\n sys.settrace(None )\n \n def runctx(self,cmd,globals,locals):\n \n self.run(cmd,globals,locals)\n \n \n \n def runcall(self,func,*args,**kwds):\n self.reset()\n sys.settrace(self.trace_dispatch)\n res=None\n try :\n res=func(*args,**kwds)\n except BdbQuit:\n pass\n finally :\n self.quitting=True\n sys.settrace(None )\n return res\n \n \ndef set_trace():\n Bdb().set_trace()\n \n \nclass Breakpoint:\n ''\n\n\n\n\n\n\n\n\n\n\n \n \n \n \n \n next=1\n bplist={}\n bpbynumber=[None ]\n \n \n \n def __init__(self,file,line,temporary=False ,cond=None ,funcname=None ):\n self.funcname=funcname\n \n self.func_first_executable_line=None\n self.file=file\n self.line=line\n self.temporary=temporary\n self.cond=cond\n self.enabled=True\n self.ignore=0\n self.hits=0\n self.number=Breakpoint.next\n Breakpoint.next +=1\n \n self.bpbynumber.append(self)\n if (file,line)in self.bplist:\n self.bplist[file,line].append(self)\n else :\n self.bplist[file,line]=[self]\n \n def deleteMe(self):\n index=(self.file,self.line)\n self.bpbynumber[self.number]=None\n self.bplist[index].remove(self)\n if not self.bplist[index]:\n \n del self.bplist[index]\n \n def enable(self):\n self.enabled=True\n \n def disable(self):\n self.enabled=False\n \n def bpprint(self,out=None ):\n if out is None :\n out=sys.stdout\n print(self.bpformat(),file=out)\n \n def bpformat(self):\n if self.temporary:\n disp='del '\n else :\n disp='keep '\n if self.enabled:\n disp=disp+'yes '\n else :\n disp=disp+'no '\n ret='%-4dbreakpoint %s at %s:%d'%(self.number,disp,\n self.file,self.line)\n if self.cond:\n ret +='\\n\\tstop only if %s'%(self.cond,)\n if self.ignore:\n ret +='\\n\\tignore next %d hits'%(self.ignore,)\n if self.hits:\n if self.hits >1:\n ss='s'\n else :\n ss=''\n ret +='\\n\\tbreakpoint already hit %d time%s'%(self.hits,ss)\n return ret\n \n def __str__(self):\n return'breakpoint %s at %s:%s'%(self.number,self.file,self.line)\n \n \n \ndef checkfuncname(b,frame):\n ''\n if not b.funcname:\n \n if b.line !=frame.f_lineno:\n \n \n return False\n return True\n \n \n \n if frame.f_code.co_name !=b.funcname:\n \n return False\n \n \n if not b.func_first_executable_line:\n \n b.func_first_executable_line=frame.f_lineno\n \n if b.func_first_executable_line !=frame.f_lineno:\n \n return False\n return True\n \n \n \ndef effective(file,line,frame):\n ''\n\n\n\n\n\n \n possibles=Breakpoint.bplist[file,line]\n for b in possibles:\n if not b.enabled:\n continue\n if not checkfuncname(b,frame):\n continue\n \n b.hits +=1\n if not b.cond:\n \n if b.ignore >0:\n b.ignore -=1\n continue\n else :\n \n return (b,True )\n else :\n \n \n \n try :\n val=eval(b.cond,frame.f_globals,frame.f_locals)\n if val:\n if b.ignore >0:\n b.ignore -=1\n \n else :\n return (b,True )\n \n \n except :\n \n \n \n return (b,False )\n return (None ,None )\n \n \n \n \nclass Tdb(Bdb):\n def 
user_call(self,frame,args):\n name=frame.f_code.co_name\n if not name:name='???'\n print('+++ call',name,args)\n def user_line(self,frame):\n import linecache\n name=frame.f_code.co_name\n if not name:name='???'\n fn=self.canonic(frame.f_code.co_filename)\n line=linecache.getline(fn,frame.f_lineno,frame.f_globals)\n print('+++',fn,frame.f_lineno,name,':',line.strip())\n def user_return(self,frame,retval):\n print('+++ return',retval)\n def user_exception(self,frame,exc_stuff):\n print('+++ exception',exc_stuff)\n self.set_continue()\n \ndef foo(n):\n print('foo(',n,')')\n x=bar(n *10)\n print('bar returned',x)\n \ndef bar(a):\n print('bar(',a,')')\n return a /2\n \ndef test():\n t=Tdb()\n t.run('import bdb; bdb.foo(10)')\n"], "fractions": [".py", "\n\n\n\"\"\"Fraction, infinite-precision, real numbers.\"\"\"\n\nfrom decimal import Decimal\nimport math\nimport numbers\nimport operator\nimport re\nimport sys\n\n__all__=['Fraction','gcd']\n\n\n\ndef gcd(a,b):\n ''\n\n\n\n \n while b:\n a,b=b,a %b\n return a\n \n \n \n_PyHASH_MODULUS=sys.hash_info.modulus\n\n\n_PyHASH_INF=sys.hash_info.inf\n\n_RATIONAL_FORMAT=re.compile(r\"\"\"\n \\A\\s* # optional whitespace at the start, then\n (?P[-+]?) # an optional sign, then\n (?=\\d|\\.\\d) # lookahead for digit or .digit\n (?P\\d*) # numerator (possibly empty)\n (?: # followed by\n (?:/(?P\\d+))? # an optional denominator\n | # or\n (?:\\.(?P\\d*))? # an optional fractional part\n (?:E(?P[-+]?\\d+))? # and optional exponent\n )\n \\s*\\Z # and optional whitespace to finish\n\"\"\",re.VERBOSE |re.IGNORECASE)\n\n\nclass Fraction(numbers.Rational):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n __slots__=('_numerator','_denominator')\n \n \n def __new__(cls,numerator=0,denominator=None ):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n self=super(Fraction,cls).__new__(cls)\n \n if denominator is None :\n if isinstance(numerator,numbers.Rational):\n self._numerator=numerator.numerator\n self._denominator=numerator.denominator\n return self\n \n elif isinstance(numerator,float):\n \n value=Fraction.from_float(numerator)\n self._numerator=value._numerator\n self._denominator=value._denominator\n return self\n \n elif isinstance(numerator,Decimal):\n value=Fraction.from_decimal(numerator)\n self._numerator=value._numerator\n self._denominator=value._denominator\n return self\n \n elif isinstance(numerator,str):\n \n m=_RATIONAL_FORMAT.match(numerator)\n if m is None :\n raise ValueError('Invalid literal for Fraction: %r'%\n numerator)\n numerator=int(m.group('num')or'0')\n denom=m.group('denom')\n if denom:\n denominator=int(denom)\n else :\n denominator=1\n decimal=m.group('decimal')\n if decimal:\n scale=10 **len(decimal)\n numerator=numerator *scale+int(decimal)\n denominator *=scale\n exp=m.group('exp')\n if exp:\n exp=int(exp)\n if exp >=0:\n numerator *=10 **exp\n else :\n denominator *=10 **-exp\n if m.group('sign')=='-':\n numerator=-numerator\n \n else :\n raise TypeError(\"argument should be a string \"\n \"or a Rational instance\")\n \n elif (isinstance(numerator,numbers.Rational)and\n isinstance(denominator,numbers.Rational)):\n numerator,denominator=(\n numerator.numerator *denominator.denominator,\n denominator.numerator *numerator.denominator\n )\n else :\n raise TypeError(\"both arguments should be \"\n \"Rational instances\")\n \n if denominator ==0:\n raise ZeroDivisionError('Fraction(%s, 0)'%numerator)\n g=gcd(numerator,denominator)\n self._numerator=numerator //g\n self._denominator=denominator //g\n return self\n \n 
@classmethod\n def from_float(cls,f):\n ''\n\n\n\n \n if isinstance(f,numbers.Integral):\n return cls(f)\n elif not isinstance(f,float):\n raise TypeError(\"%s.from_float() only takes floats, not %r (%s)\"%\n (cls.__name__,f,type(f).__name__))\n if math.isnan(f):\n raise ValueError(\"Cannot convert %r to %s.\"%(f,cls.__name__))\n if math.isinf(f):\n raise OverflowError(\"Cannot convert %r to %s.\"%(f,cls.__name__))\n return cls(*f.as_integer_ratio())\n \n @classmethod\n def from_decimal(cls,dec):\n ''\n from decimal import Decimal\n if isinstance(dec,numbers.Integral):\n dec=Decimal(int(dec))\n elif not isinstance(dec,Decimal):\n raise TypeError(\n \"%s.from_decimal() only takes Decimals, not %r (%s)\"%\n (cls.__name__,dec,type(dec).__name__))\n if dec.is_infinite():\n raise OverflowError(\n \"Cannot convert %s to %s.\"%(dec,cls.__name__))\n if dec.is_nan():\n raise ValueError(\"Cannot convert %s to %s.\"%(dec,cls.__name__))\n sign,digits,exp=dec.as_tuple()\n digits=int(''.join(map(str,digits)))\n if sign:\n digits=-digits\n if exp >=0:\n return cls(digits *10 **exp)\n else :\n return cls(digits,10 **-exp)\n \n def limit_denominator(self,max_denominator=1000000):\n ''\n\n\n\n\n\n\n\n\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n if max_denominator <1:\n raise ValueError(\"max_denominator should be at least 1\")\n if self._denominator <=max_denominator:\n return Fraction(self)\n \n p0,q0,p1,q1=0,1,1,0\n n,d=self._numerator,self._denominator\n while True :\n a=n //d\n q2=q0+a *q1\n if q2 >max_denominator:\n break\n p0,q0,p1,q1=p1,q1,p0+a *p1,q2\n n,d=d,n -a *d\n \n k=(max_denominator -q0)//q1\n bound1=Fraction(p0+k *p1,q0+k *q1)\n bound2=Fraction(p1,q1)\n if abs(bound2 -self)<=abs(bound1 -self):\n return bound2\n else :\n return bound1\n \n @property\n def numerator(a):\n return a._numerator\n \n @property\n def denominator(a):\n return a._denominator\n \n def __repr__(self):\n ''\n return ('Fraction(%s, %s)'%(self._numerator,self._denominator))\n \n def __str__(self):\n ''\n if self._denominator ==1:\n return str(self._numerator)\n else :\n return'%s/%s'%(self._numerator,self._denominator)\n \n def _operator_fallbacks(monomorphic_operator,fallback_operator):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n def forward(a,b):\n if isinstance(b,(int,Fraction)):\n return monomorphic_operator(a,b)\n elif isinstance(b,float):\n return fallback_operator(float(a),b)\n elif isinstance(b,complex):\n return fallback_operator(complex(a),b)\n else :\n return NotImplemented\n forward.__name__='__'+fallback_operator.__name__+'__'\n forward.__doc__=monomorphic_operator.__doc__\n \n def reverse(b,a):\n if isinstance(a,numbers.Rational):\n \n return monomorphic_operator(a,b)\n elif isinstance(a,numbers.Real):\n return fallback_operator(float(a),float(b))\n elif isinstance(a,numbers.Complex):\n return fallback_operator(complex(a),complex(b))\n else :\n return NotImplemented\n reverse.__name__='__r'+fallback_operator.__name__+'__'\n reverse.__doc__=monomorphic_operator.__doc__\n \n return forward,reverse\n \n def _add(a,b):\n ''\n return Fraction(a.numerator *b.denominator+\n b.numerator *a.denominator,\n a.denominator *b.denominator)\n \n __add__,__radd__=_operator_fallbacks(_add,operator.add)\n \n def _sub(a,b):\n ''\n return Fraction(a.numerator *b.denominator -\n b.numerator *a.denominator,\n a.denominator *b.denominator)\n \n 
__sub__,__rsub__=_operator_fallbacks(_sub,operator.sub)\n \n def _mul(a,b):\n ''\n return Fraction(a.numerator *b.numerator,a.denominator *b.denominator)\n \n __mul__,__rmul__=_operator_fallbacks(_mul,operator.mul)\n \n def _div(a,b):\n ''\n return Fraction(a.numerator *b.denominator,\n a.denominator *b.numerator)\n \n __truediv__,__rtruediv__=_operator_fallbacks(_div,operator.truediv)\n \n def __floordiv__(a,b):\n ''\n return math.floor(a /b)\n \n def __rfloordiv__(b,a):\n ''\n return math.floor(a /b)\n \n def __mod__(a,b):\n ''\n div=a //b\n return a -b *div\n \n def __rmod__(b,a):\n ''\n div=a //b\n return a -b *div\n \n def __pow__(a,b):\n ''\n\n\n\n\n\n \n if isinstance(b,numbers.Rational):\n if b.denominator ==1:\n power=b.numerator\n if power >=0:\n return Fraction(a._numerator **power,\n a._denominator **power)\n else :\n return Fraction(a._denominator **-power,\n a._numerator **-power)\n else :\n \n \n return float(a)**float(b)\n else :\n return float(a)**b\n \n def __rpow__(b,a):\n ''\n if b._denominator ==1 and b._numerator >=0:\n \n return a **b._numerator\n \n if isinstance(a,numbers.Rational):\n return Fraction(a.numerator,a.denominator)**b\n \n if b._denominator ==1:\n return a **b._numerator\n \n return a **float(b)\n \n def __pos__(a):\n ''\n return Fraction(a._numerator,a._denominator)\n \n def __neg__(a):\n ''\n return Fraction(-a._numerator,a._denominator)\n \n def __abs__(a):\n ''\n return Fraction(abs(a._numerator),a._denominator)\n \n def __trunc__(a):\n ''\n if a._numerator <0:\n return -(-a._numerator //a._denominator)\n else :\n return a._numerator //a._denominator\n \n def __floor__(a):\n ''\n return a.numerator //a.denominator\n \n def __ceil__(a):\n ''\n \n return -(-a.numerator //a.denominator)\n \n def __round__(self,ndigits=None ):\n ''\n\n\n \n if ndigits is None :\n floor,remainder=divmod(self.numerator,self.denominator)\n if remainder *2 self.denominator:\n return floor+1\n \n elif floor %2 ==0:\n return floor\n else :\n return floor+1\n shift=10 **abs(ndigits)\n \n \n \n if ndigits >0:\n return Fraction(round(self *shift),shift)\n else :\n return Fraction(round(self /shift)*shift)\n \n def __hash__(self):\n ''\n \n \n \n \n \n \n \n \n \n \n \n \n dinv=pow(self._denominator,_PyHASH_MODULUS -2,_PyHASH_MODULUS)\n if not dinv:\n hash_=_PyHASH_INF\n else :\n hash_=abs(self._numerator)*dinv %_PyHASH_MODULUS\n result=hash_ if self >=0 else -hash_\n return -2 if result ==-1 else result\n \n def __eq__(a,b):\n ''\n if isinstance(b,numbers.Rational):\n return (a._numerator ==b.numerator and\n a._denominator ==b.denominator)\n if isinstance(b,numbers.Complex)and b.imag ==0:\n b=b.real\n if isinstance(b,float):\n if math.isnan(b)or math.isinf(b):\n \n \n return 0.0 ==b\n else :\n return a ==a.from_float(b)\n else :\n \n \n return NotImplemented\n \n def _richcmp(self,other,op):\n ''\n\n\n\n\n\n\n\n \n \n if isinstance(other,numbers.Rational):\n return op(self._numerator *other.denominator,\n self._denominator *other.numerator)\n if isinstance(other,float):\n if math.isnan(other)or math.isinf(other):\n return op(0.0,other)\n else :\n return op(self,self.from_float(other))\n else :\n return NotImplemented\n \n def __lt__(a,b):\n ''\n return a._richcmp(b,operator.lt)\n \n def __gt__(a,b):\n ''\n return a._richcmp(b,operator.gt)\n \n def __le__(a,b):\n ''\n return a._richcmp(b,operator.le)\n \n def __ge__(a,b):\n ''\n return a._richcmp(b,operator.ge)\n \n def __bool__(a):\n ''\n return a._numerator !=0\n \n \n \n def __reduce__(self):\n return 
(self.__class__,(str(self),))\n \n def __copy__(self):\n if type(self)==Fraction:\n return self\n return self.__class__(self._numerator,self._denominator)\n \n def __deepcopy__(self,memo):\n if type(self)==Fraction:\n return self\n return self.__class__(self._numerator,self._denominator)\n"], "encodings.iso8859_11": [".py", "''\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_table)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False ):\n return codecs.charmap_encode(input,self.errors,encoding_table)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False ):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='iso8859-11',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n \n \ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' '\n'!'\n'\"'\n'#'\n'$'\n'%'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\x80'\n'\\x81'\n'\\x82'\n'\\x83'\n'\\x84'\n'\\x85'\n'\\x86'\n'\\x87'\n'\\x88'\n'\\x89'\n'\\x8a'\n'\\x8b'\n'\\x8c'\n'\\x8d'\n'\\x8e'\n'\\x8f'\n'\\x90'\n'\\x91'\n'\\x92'\n'\\x93'\n'\\x94'\n'\\x95'\n'\\x96'\n'\\x97'\n'\\x98'\n'\\x99'\n'\\x9a'\n'\\x9b'\n'\\x9c'\n'\\x9d'\n'\\x9e'\n'\\x9f'\n'\\xa0'\n'\\u0e01'\n'\\u0e02'\n'\\u0e03'\n'\\u0e04'\n'\\u0e05'\n'\\u0e06'\n'\\u0e07'\n'\\u0e08'\n'\\u0e09'\n'\\u0e0a'\n'\\u0e0b'\n'\\u0e0c'\n'\\u0e0d'\n'\\u0e0e'\n'\\u0e0f'\n'\\u0e10'\n'\\u0e11'\n'\\u0e12'\n'\\u0e13'\n'\\u0e14'\n'\\u0e15'\n'\\u0e16'\n'\\u0e17'\n'\\u0e18'\n'\\u0e19'\n'\\u0e1a'\n'\\u0e1b'\n'\\u0e1c'\n'\\u0e1d'\n'\\u0e1e'\n'\\u0e1f'\n'\\u0e20'\n'\\u0e21'\n'\\u0e22'\n'\\u0e23'\n'\\u0e24'\n'\\u0e25'\n'\\u0e26'\n'\\u0e27'\n'\\u0e28'\n'\\u0e29'\n'\\u0e2a'\n'\\u0e2b'\n'\\u0e2c'\n'\\u0e2d'\n'\\u0e2e'\n'\\u0e2f'\n'\\u0e30'\n'\\u0e31'\n'\\u0e32'\n'\\u0e33'\n'\\u0e34'\n'\\u0e35'\n'\\u0e36'\n'\\u0e37'\n'\\u0e38'\n'\\u0e39'\n'\\u0e3a'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\u0e3f'\n'\\u0e40'\n'\\u0e41'\n'\\u0e42'\n'\\u0e43'\n'\\u0e44'\n'\\u0e45'\n'\\u0e46'\n'\\u0e47'\n'\\u0e48'\n'\\u0e49'\n'\\u0e4a'\n'\\u0e4b'\n'\\u0e4c'\n'\\u0e4d'\n'\\u0e4e'\n'\\u0e4f'\n'\\u0e50'\n'\\u0e51'\n'\\u0e52'\n'\\u0e53'\n'\\u0e54'\n'\\u0e55'\n'\\u0e56'\n'\\u0e57'\n'\\u0e58'\n'\\u0e59'\n'\\u0e5a'\n'\\u0e5b'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n)\n\n\nencoding_table=codecs.charmap_build(decoding_table)\n"], "xml.parsers": [".py", 
"''\n\n\n\n\n\n\n\n", 1], "subprocess": [".py", "\n\n\n\n\n\n\n\n\nr\"\"\"subprocess - Subprocesses with accessible I/O streams\n\nThis module allows you to spawn processes, connect to their\ninput/output/error pipes, and obtain their return codes. This module\nintends to replace several other, older modules and functions, like:\n\nos.system\nos.spawn*\n\nInformation about how the subprocess module can be used to replace these\nmodules and functions can be found below.\n\n\n\nUsing the subprocess module\n===========================\nThis module defines one class called Popen:\n\nclass Popen(args, bufsize=-1, executable=None,\n stdin=None, stdout=None, stderr=None,\n preexec_fn=None, close_fds=True, shell=False,\n cwd=None, env=None, universal_newlines=False,\n startupinfo=None, creationflags=0,\n restore_signals=True, start_new_session=False, pass_fds=()):\n\n\nArguments are:\n\nargs should be a string, or a sequence of program arguments. The\nprogram to execute is normally the first item in the args sequence or\nstring, but can be explicitly set by using the executable argument.\n\nOn POSIX, with shell=False (default): In this case, the Popen class\nuses os.execvp() to execute the child program. args should normally\nbe a sequence. A string will be treated as a sequence with the string\nas the only item (the program to execute).\n\nOn POSIX, with shell=True: If args is a string, it specifies the\ncommand string to execute through the shell. If args is a sequence,\nthe first item specifies the command string, and any additional items\nwill be treated as additional shell arguments.\n\nOn Windows: the Popen class uses CreateProcess() to execute the child\nprogram, which operates on strings. If args is a sequence, it will be\nconverted to a string using the list2cmdline method. Please note that\nnot all MS Windows applications interpret the command line the same\nway: The list2cmdline is designed for applications using the same\nrules as the MS C runtime.\n\nbufsize will be supplied as the corresponding argument to the io.open()\nfunction when creating the stdin/stdout/stderr pipe file objects:\n0 means unbuffered (read & write are one system call and can return short),\n1 means line buffered, any other positive value means use a buffer of\napproximately that size. A negative bufsize, the default, means the system\ndefault of io.DEFAULT_BUFFER_SIZE will be used.\n\nstdin, stdout and stderr specify the executed programs' standard\ninput, standard output and standard error file handles, respectively.\nValid values are PIPE, an existing file descriptor (a positive\ninteger), an existing file object, and None. PIPE indicates that a\nnew pipe to the child should be created. With None, no redirection\nwill occur; the child's file handles will be inherited from the\nparent. Additionally, stderr can be STDOUT, which indicates that the\nstderr data from the applications should be captured into the same\nfile handle as for stdout.\n\nOn POSIX, if preexec_fn is set to a callable object, this object will be\ncalled in the child process just before the child is executed. The use\nof preexec_fn is not thread safe, using it in the presence of threads\ncould lead to a deadlock in the child process before the new executable\nis executed.\n\nIf close_fds is true, all file descriptors except 0, 1 and 2 will be\nclosed before the child process is executed. The default for close_fds\nvaries by platform: Always true on POSIX. 
True when stdin/stdout/stderr\nare None on Windows, false otherwise.\n\npass_fds is an optional sequence of file descriptors to keep open between the\nparent and child. Providing any pass_fds implicitly sets close_fds to true.\n\nif shell is true, the specified command will be executed through the\nshell.\n\nIf cwd is not None, the current directory will be changed to cwd\nbefore the child is executed.\n\nOn POSIX, if restore_signals is True all signals that Python sets to\nSIG_IGN are restored to SIG_DFL in the child process before the exec.\nCurrently this includes the SIGPIPE, SIGXFZ and SIGXFSZ signals. This\nparameter does nothing on Windows.\n\nOn POSIX, if start_new_session is True, the setsid() system call will be made\nin the child process prior to executing the command.\n\nIf env is not None, it defines the environment variables for the new\nprocess.\n\nIf universal_newlines is false, the file objects stdin, stdout and stderr\nare opened as binary files, and no line ending conversion is done.\n\nIf universal_newlines is true, the file objects stdout and stderr are\nopened as a text files, but lines may be terminated by any of '\\n',\nthe Unix end-of-line convention, '\\r', the old Macintosh convention or\n'\\r\\n', the Windows convention. All of these external representations\nare seen as '\\n' by the Python program. Also, the newlines attribute\nof the file objects stdout, stdin and stderr are not updated by the\ncommunicate() method.\n\nThe startupinfo and creationflags, if given, will be passed to the\nunderlying CreateProcess() function. They can specify things such as\nappearance of the main window and priority for the new process.\n(Windows only)\n\n\nThis module also defines some shortcut functions:\n\ncall(*popenargs, **kwargs):\n Run command with arguments. Wait for command to complete, then\n return the returncode attribute.\n\n The arguments are the same as for the Popen constructor. Example:\n\n >>> retcode = subprocess.call([\"ls\", \"-l\"])\n\ncheck_call(*popenargs, **kwargs):\n Run command with arguments. Wait for command to complete. If the\n exit code was zero then return, otherwise raise\n CalledProcessError. The CalledProcessError object will have the\n return code in the returncode attribute.\n\n The arguments are the same as for the Popen constructor. Example:\n\n >>> subprocess.check_call([\"ls\", \"-l\"])\n 0\n\ngetstatusoutput(cmd):\n Return (status, output) of executing cmd in a shell.\n\n Execute the string 'cmd' in a shell with os.popen() and return a 2-tuple\n (status, output). cmd is actually run as '{ cmd ; } 2>&1', so that the\n returned output will contain output or error messages. A trailing newline\n is stripped from the output. The exit status for the command can be\n interpreted according to the rules for the C function wait(). Example:\n\n >>> subprocess.getstatusoutput('ls /bin/ls')\n (0, '/bin/ls')\n >>> subprocess.getstatusoutput('cat /bin/junk')\n (256, 'cat: /bin/junk: No such file or directory')\n >>> subprocess.getstatusoutput('/bin/junk')\n (256, 'sh: /bin/junk: not found')\n\ngetoutput(cmd):\n Return output (stdout or stderr) of executing cmd in a shell.\n\n Like getstatusoutput(), except the exit status is ignored and the return\n value is a string containing the command's output. Example:\n\n >>> subprocess.getoutput('ls /bin/ls')\n '/bin/ls'\n\ncheck_output(*popenargs, **kwargs):\n Run command with arguments and return its output.\n\n If the exit code was non-zero it raises a CalledProcessError. 
The\n CalledProcessError object will have the return code in the returncode\n attribute and output in the output attribute.\n\n The arguments are the same as for the Popen constructor. Example:\n\n >>> output = subprocess.check_output([\"ls\", \"-l\", \"/dev/null\"])\n\n\nExceptions\n----------\nExceptions raised in the child process, before the new program has\nstarted to execute, will be re-raised in the parent. Additionally,\nthe exception object will have one extra attribute called\n'child_traceback', which is a string containing traceback information\nfrom the child's point of view.\n\nThe most common exception raised is OSError. This occurs, for\nexample, when trying to execute a non-existent file. Applications\nshould prepare for OSErrors.\n\nA ValueError will be raised if Popen is called with invalid arguments.\n\nExceptions defined within this module inherit from SubprocessError.\ncheck_call() and check_output() will raise CalledProcessError if the\ncalled process returns a non-zero return code. TimeoutExpired\nbe raised if a timeout was specified and expired.\n\n\nSecurity\n--------\nUnlike some other popen functions, this implementation will never call\n/bin/sh implicitly. This means that all characters, including shell\nmetacharacters, can safely be passed to child processes.\n\n\nPopen objects\n=============\nInstances of the Popen class have the following methods:\n\npoll()\n Check if child process has terminated. Returns returncode\n attribute.\n\nwait()\n Wait for child process to terminate. Returns returncode attribute.\n\ncommunicate(input=None)\n Interact with process: Send data to stdin. Read data from stdout\n and stderr, until end-of-file is reached. Wait for process to\n terminate. The optional input argument should be a string to be\n sent to the child process, or None, if no data should be sent to\n the child.\n\n communicate() returns a tuple (stdout, stderr).\n\n Note: The data read is buffered in memory, so do not use this\n method if the data size is large or unlimited.\n\nThe following attributes are also available:\n\nstdin\n If the stdin argument is PIPE, this attribute is a file object\n that provides input to the child process. Otherwise, it is None.\n\nstdout\n If the stdout argument is PIPE, this attribute is a file object\n that provides output from the child process. Otherwise, it is\n None.\n\nstderr\n If the stderr argument is PIPE, this attribute is file object that\n provides error output from the child process. Otherwise, it is\n None.\n\npid\n The process ID of the child process.\n\nreturncode\n The child return code. A None value indicates that the process\n hasn't terminated yet. 
A negative value -N indicates that the\n child was terminated by signal N (POSIX only).\n\n\nReplacing older functions with the subprocess module\n====================================================\nIn this section, \"a ==> b\" means that b can be used as a replacement\nfor a.\n\nNote: All functions in this section fail (more or less) silently if\nthe executed program cannot be found; this module raises an OSError\nexception.\n\nIn the following examples, we assume that the subprocess module is\nimported with \"from subprocess import *\".\n\n\nReplacing /bin/sh shell backquote\n---------------------------------\noutput=`mycmd myarg`\n==>\noutput = Popen([\"mycmd\", \"myarg\"], stdout=PIPE).communicate()[0]\n\n\nReplacing shell pipe line\n-------------------------\noutput=`dmesg | grep hda`\n==>\np1 = Popen([\"dmesg\"], stdout=PIPE)\np2 = Popen([\"grep\", \"hda\"], stdin=p1.stdout, stdout=PIPE)\noutput = p2.communicate()[0]\n\n\nReplacing os.system()\n---------------------\nsts = os.system(\"mycmd\" + \" myarg\")\n==>\np = Popen(\"mycmd\" + \" myarg\", shell=True)\npid, sts = os.waitpid(p.pid, 0)\n\nNote:\n\n* Calling the program through the shell is usually not required.\n\n* It's easier to look at the returncode attribute than the\n exitstatus.\n\nA more real-world example would look like this:\n\ntry:\n retcode = call(\"mycmd\" + \" myarg\", shell=True)\n if retcode < 0:\n print(\"Child was terminated by signal\", -retcode, file=sys.stderr)\n else:\n print(\"Child returned\", retcode, file=sys.stderr)\nexcept OSError as e:\n print(\"Execution failed:\", e, file=sys.stderr)\n\n\nReplacing os.spawn*\n-------------------\nP_NOWAIT example:\n\npid = os.spawnlp(os.P_NOWAIT, \"/bin/mycmd\", \"mycmd\", \"myarg\")\n==>\npid = Popen([\"/bin/mycmd\", \"myarg\"]).pid\n\n\nP_WAIT example:\n\nretcode = os.spawnlp(os.P_WAIT, \"/bin/mycmd\", \"mycmd\", \"myarg\")\n==>\nretcode = call([\"/bin/mycmd\", \"myarg\"])\n\n\nVector example:\n\nos.spawnvp(os.P_NOWAIT, path, args)\n==>\nPopen([path] + args[1:])\n\n\nEnvironment example:\n\nos.spawnlpe(os.P_NOWAIT, \"/bin/mycmd\", \"mycmd\", \"myarg\", env)\n==>\nPopen([\"/bin/mycmd\", \"myarg\"], env={\"PATH\": \"/usr/bin\"})\n\"\"\"\n\nimport sys\nmswindows=(sys.platform ==\"win32\")\n\nimport io\nimport os\nimport time\nimport traceback\nimport gc\nimport signal\nimport builtins\nimport warnings\nimport errno\ntry :\n from time import monotonic as _time\nexcept ImportError:\n from time import time as _time\n \n \nclass SubprocessError(Exception):pass\n\n\nclass CalledProcessError(SubprocessError):\n ''\n\n\n\n \n def __init__(self,returncode,cmd,output=None ):\n self.returncode=returncode\n self.cmd=cmd\n self.output=output\n def __str__(self):\n return\"Command '%s' returned non-zero exit status %d\"%(self.cmd,self.returncode)\n \n \nclass TimeoutExpired(SubprocessError):\n ''\n\n \n def __init__(self,cmd,timeout,output=None ):\n self.cmd=cmd\n self.timeout=timeout\n self.output=output\n \n def __str__(self):\n return (\"Command '%s' timed out after %s seconds\"%\n (self.cmd,self.timeout))\n \n \nif mswindows:\n import threading\n import msvcrt\n import _winapi\n class STARTUPINFO:\n dwFlags=0\n hStdInput=None\n hStdOutput=None\n hStdError=None\n wShowWindow=0\n class pywintypes:\n error=IOError\nelse :\n import select\n _has_poll=hasattr(select,'poll')\n import _posixsubprocess\n _create_pipe=_posixsubprocess.cloexec_pipe\n \n \n \n \n _PIPE_BUF=getattr(select,'PIPE_BUF',512)\n \n 
\n__all__=[\"Popen\",\"PIPE\",\"STDOUT\",\"call\",\"check_call\",\"getstatusoutput\",\n\"getoutput\",\"check_output\",\"CalledProcessError\",\"DEVNULL\"]\n\nif mswindows:\n from _winapi import (CREATE_NEW_CONSOLE,CREATE_NEW_PROCESS_GROUP,\n STD_INPUT_HANDLE,STD_OUTPUT_HANDLE,\n STD_ERROR_HANDLE,SW_HIDE,\n STARTF_USESTDHANDLES,STARTF_USESHOWWINDOW)\n \n __all__.extend([\"CREATE_NEW_CONSOLE\",\"CREATE_NEW_PROCESS_GROUP\",\n \"STD_INPUT_HANDLE\",\"STD_OUTPUT_HANDLE\",\n \"STD_ERROR_HANDLE\",\"SW_HIDE\",\n \"STARTF_USESTDHANDLES\",\"STARTF_USESHOWWINDOW\"])\n \n class Handle(int):\n closed=False\n \n def Close(self,CloseHandle=_winapi.CloseHandle):\n if not self.closed:\n self.closed=True\n CloseHandle(self)\n \n def Detach(self):\n if not self.closed:\n self.closed=True\n return int(self)\n raise ValueError(\"already closed\")\n \n def __repr__(self):\n return\"Handle(%d)\"%int(self)\n \n __del__=Close\n __str__=__repr__\n \ntry :\n MAXFD=os.sysconf(\"SC_OPEN_MAX\")\nexcept :\n MAXFD=256\n \n \n \n \n \n_active=[]\n\ndef _cleanup():\n for inst in _active[:]:\n res=inst._internal_poll(_deadstate=sys.maxsize)\n if res is not None :\n try :\n _active.remove(inst)\n except ValueError:\n \n \n pass\n \nPIPE=-1\nSTDOUT=-2\nDEVNULL=-3\n\n\ndef _eintr_retry_call(func,*args):\n while True :\n try :\n return func(*args)\n except InterruptedError:\n continue\n \n \n \n \n \n \ndef _args_from_interpreter_flags():\n ''\n \n flag_opt_map={\n 'debug':'d',\n \n \n 'optimize':'O',\n 'dont_write_bytecode':'B',\n 'no_user_site':'s',\n 'no_site':'S',\n 'ignore_environment':'E',\n 'verbose':'v',\n 'bytes_warning':'b',\n 'quiet':'q',\n 'hash_randomization':'R',\n }\n args=[]\n for flag,opt in flag_opt_map.items():\n v=getattr(sys.flags,flag)\n if v >0:\n args.append('-'+opt *v)\n for opt in sys.warnoptions:\n args.append('-W'+opt)\n return args\n \n \ndef call(*popenargs,timeout=None ,**kwargs):\n ''\n\n\n\n\n\n \n with Popen(*popenargs,**kwargs)as p:\n try :\n return p.wait(timeout=timeout)\n except :\n p.kill()\n p.wait()\n raise\n \n \ndef check_call(*popenargs,**kwargs):\n ''\n\n\n\n\n\n\n\n \n retcode=call(*popenargs,**kwargs)\n if retcode:\n cmd=kwargs.get(\"args\")\n if cmd is None :\n cmd=popenargs[0]\n raise CalledProcessError(retcode,cmd)\n return 0\n \n \ndef check_output(*popenargs,timeout=None ,**kwargs):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if'stdout'in kwargs:\n raise ValueError('stdout argument not allowed, it will be overridden.')\n with Popen(*popenargs,stdout=PIPE,**kwargs)as process:\n try :\n output,unused_err=process.communicate(timeout=timeout)\n except TimeoutExpired:\n process.kill()\n output,unused_err=process.communicate()\n raise TimeoutExpired(process.args,timeout,output=output)\n except :\n process.kill()\n process.wait()\n raise\n retcode=process.poll()\n if retcode:\n raise CalledProcessError(retcode,process.args,output=output)\n return output\n \n \ndef list2cmdline(seq):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n \n \n \n \n result=[]\n needquote=False\n for arg in seq:\n bs_buf=[]\n \n \n if result:\n result.append(' ')\n \n needquote=(\" \"in arg)or (\"\\t\"in arg)or not arg\n if needquote:\n result.append('\"')\n \n for c in arg:\n if c =='\\\\':\n \n bs_buf.append(c)\n elif c =='\"':\n \n result.append('\\\\'*len(bs_buf)*2)\n bs_buf=[]\n result.append('\\\\\"')\n else :\n \n if bs_buf:\n result.extend(bs_buf)\n bs_buf=[]\n result.append(c)\n \n \n if bs_buf:\n result.extend(bs_buf)\n \n if needquote:\n result.extend(bs_buf)\n result.append('\"')\n \n 
return''.join(result)\n \n \n \n \n \n \ndef getstatusoutput(cmd):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n with os.popen('{ '+cmd+'; } 2>&1','r')as pipe:\n try :\n text=pipe.read()\n sts=pipe.close()\n except :\n process=pipe._proc\n process.kill()\n process.wait()\n raise\n if sts is None :\n sts=0\n if text[-1:]=='\\n':\n text=text[:-1]\n return sts,text\n \n \ndef getoutput(cmd):\n ''\n\n\n\n\n\n\n\n \n return getstatusoutput(cmd)[1]\n \n \n_PLATFORM_DEFAULT_CLOSE_FDS=object()\n\n\nclass Popen(object):\n def __init__(self,args,bufsize=-1,executable=None ,\n stdin=None ,stdout=None ,stderr=None ,\n preexec_fn=None ,close_fds=_PLATFORM_DEFAULT_CLOSE_FDS,\n shell=False ,cwd=None ,env=None ,universal_newlines=False ,\n startupinfo=None ,creationflags=0,\n restore_signals=True ,start_new_session=False ,\n pass_fds=()):\n ''\n _cleanup()\n \n self._child_created=False\n self._input=None\n self._communication_started=False\n if bufsize is None :\n bufsize=-1\n if not isinstance(bufsize,int):\n raise TypeError(\"bufsize must be an integer\")\n \n if mswindows:\n if preexec_fn is not None :\n raise ValueError(\"preexec_fn is not supported on Windows \"\n \"platforms\")\n any_stdio_set=(stdin is not None or stdout is not None or\n stderr is not None )\n if close_fds is _PLATFORM_DEFAULT_CLOSE_FDS:\n if any_stdio_set:\n close_fds=False\n else :\n close_fds=True\n elif close_fds and any_stdio_set:\n raise ValueError(\n \"close_fds is not supported on Windows platforms\"\n \" if you redirect stdin/stdout/stderr\")\n else :\n \n if close_fds is _PLATFORM_DEFAULT_CLOSE_FDS:\n close_fds=True\n if pass_fds and not close_fds:\n warnings.warn(\"pass_fds overriding close_fds.\",RuntimeWarning)\n close_fds=True\n if startupinfo is not None :\n raise ValueError(\"startupinfo is only supported on Windows \"\n \"platforms\")\n if creationflags !=0:\n raise ValueError(\"creationflags is only supported on Windows \"\n \"platforms\")\n \n self.args=args\n self.stdin=None\n self.stdout=None\n self.stderr=None\n self.pid=None\n self.returncode=None\n self.universal_newlines=universal_newlines\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n (p2cread,p2cwrite,\n c2pread,c2pwrite,\n errread,errwrite)=self._get_handles(stdin,stdout,stderr)\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n if p2cwrite !=-1:\n self.stdin=io.open(p2cwrite,'wb',bufsize)\n if universal_newlines:\n self.stdin=io.TextIOWrapper(self.stdin,write_through=True )\n if c2pread !=-1:\n self.stdout=io.open(c2pread,'rb',bufsize)\n if universal_newlines:\n self.stdout=io.TextIOWrapper(self.stdout)\n if errread !=-1:\n self.stderr=io.open(errread,'rb',bufsize)\n if universal_newlines:\n self.stderr=io.TextIOWrapper(self.stderr)\n \n self._closed_child_pipe_fds=False\n try :\n self._execute_child(args,executable,preexec_fn,close_fds,\n pass_fds,cwd,env,\n startupinfo,creationflags,shell,\n p2cread,p2cwrite,\n c2pread,c2pwrite,\n errread,errwrite,\n restore_signals,start_new_session)\n except :\n \n for f in filter(None ,(self.stdin,self.stdout,self.stderr)):\n try :\n f.close()\n except EnvironmentError:\n pass\n \n if not self._closed_child_pipe_fds:\n to_close=[]\n if stdin ==PIPE:\n to_close.append(p2cread)\n if stdout ==PIPE:\n to_close.append(c2pwrite)\n if stderr ==PIPE:\n to_close.append(errwrite)\n if hasattr(self,'_devnull'):\n to_close.append(self._devnull)\n for fd in to_close:\n try :\n os.close(fd)\n except EnvironmentError:\n pass\n \n raise\n \n \n def _translate_newlines(self,data,encoding):\n data=data.decode(encoding)\n return 
data.replace(\"\\r\\n\",\"\\n\").replace(\"\\r\",\"\\n\")\n \n def __enter__(self):\n return self\n \n def __exit__(self,type,value,traceback):\n if self.stdout:\n self.stdout.close()\n if self.stderr:\n self.stderr.close()\n if self.stdin:\n self.stdin.close()\n \n self.wait()\n \n def __del__(self,_maxsize=sys.maxsize,_active=_active):\n \n \n \n if not getattr(self,'_child_created',False ):\n \n return\n \n self._internal_poll(_deadstate=_maxsize)\n if self.returncode is None and _active is not None :\n \n _active.append(self)\n \n def _get_devnull(self):\n if not hasattr(self,'_devnull'):\n self._devnull=os.open(os.devnull,os.O_RDWR)\n return self._devnull\n \n def communicate(self,input=None ,timeout=None ):\n ''\n\n\n\n\n\n \n \n if self._communication_started and input:\n raise ValueError(\"Cannot send input after starting communication\")\n \n \n \n \n if (timeout is None and not self._communication_started and\n [self.stdin,self.stdout,self.stderr].count(None )>=2):\n stdout=None\n stderr=None\n if self.stdin:\n if input:\n try :\n self.stdin.write(input)\n except IOError as e:\n if e.errno !=errno.EPIPE and e.errno !=errno.EINVAL:\n raise\n self.stdin.close()\n elif self.stdout:\n stdout=_eintr_retry_call(self.stdout.read)\n self.stdout.close()\n elif self.stderr:\n stderr=_eintr_retry_call(self.stderr.read)\n self.stderr.close()\n self.wait()\n else :\n if timeout is not None :\n endtime=_time()+timeout\n else :\n endtime=None\n \n try :\n stdout,stderr=self._communicate(input,endtime,timeout)\n finally :\n self._communication_started=True\n \n sts=self.wait(timeout=self._remaining_time(endtime))\n \n return (stdout,stderr)\n \n \n def poll(self):\n return self._internal_poll()\n \n \n def _remaining_time(self,endtime):\n ''\n if endtime is None :\n return None\n else :\n return endtime -_time()\n \n \n def _check_timeout(self,endtime,orig_timeout):\n ''\n if endtime is None :\n return\n if _time()>endtime:\n raise TimeoutExpired(self.args,orig_timeout)\n \n \n if mswindows:\n \n \n \n def _get_handles(self,stdin,stdout,stderr):\n ''\n\n \n if stdin is None and stdout is None and stderr is None :\n return (-1,-1,-1,-1,-1,-1)\n \n p2cread,p2cwrite=-1,-1\n c2pread,c2pwrite=-1,-1\n errread,errwrite=-1,-1\n \n if stdin is None :\n p2cread=_winapi.GetStdHandle(_winapi.STD_INPUT_HANDLE)\n if p2cread is None :\n p2cread,_=_winapi.CreatePipe(None ,0)\n p2cread=Handle(p2cread)\n _winapi.CloseHandle(_)\n elif stdin ==PIPE:\n p2cread,p2cwrite=_winapi.CreatePipe(None ,0)\n p2cread,p2cwrite=Handle(p2cread),Handle(p2cwrite)\n elif stdin ==DEVNULL:\n p2cread=msvcrt.get_osfhandle(self._get_devnull())\n elif isinstance(stdin,int):\n p2cread=msvcrt.get_osfhandle(stdin)\n else :\n \n p2cread=msvcrt.get_osfhandle(stdin.fileno())\n p2cread=self._make_inheritable(p2cread)\n \n if stdout is None :\n c2pwrite=_winapi.GetStdHandle(_winapi.STD_OUTPUT_HANDLE)\n if c2pwrite is None :\n _,c2pwrite=_winapi.CreatePipe(None ,0)\n c2pwrite=Handle(c2pwrite)\n _winapi.CloseHandle(_)\n elif stdout ==PIPE:\n c2pread,c2pwrite=_winapi.CreatePipe(None ,0)\n c2pread,c2pwrite=Handle(c2pread),Handle(c2pwrite)\n elif stdout ==DEVNULL:\n c2pwrite=msvcrt.get_osfhandle(self._get_devnull())\n elif isinstance(stdout,int):\n c2pwrite=msvcrt.get_osfhandle(stdout)\n else :\n \n c2pwrite=msvcrt.get_osfhandle(stdout.fileno())\n c2pwrite=self._make_inheritable(c2pwrite)\n \n if stderr is None :\n errwrite=_winapi.GetStdHandle(_winapi.STD_ERROR_HANDLE)\n if errwrite is None :\n _,errwrite=_winapi.CreatePipe(None ,0)\n 
errwrite=Handle(errwrite)\n _winapi.CloseHandle(_)\n elif stderr ==PIPE:\n errread,errwrite=_winapi.CreatePipe(None ,0)\n errread,errwrite=Handle(errread),Handle(errwrite)\n elif stderr ==STDOUT:\n errwrite=c2pwrite\n elif stderr ==DEVNULL:\n errwrite=msvcrt.get_osfhandle(self._get_devnull())\n elif isinstance(stderr,int):\n errwrite=msvcrt.get_osfhandle(stderr)\n else :\n \n errwrite=msvcrt.get_osfhandle(stderr.fileno())\n errwrite=self._make_inheritable(errwrite)\n \n return (p2cread,p2cwrite,\n c2pread,c2pwrite,\n errread,errwrite)\n \n \n def _make_inheritable(self,handle):\n ''\n h=_winapi.DuplicateHandle(\n _winapi.GetCurrentProcess(),handle,\n _winapi.GetCurrentProcess(),0,1,\n _winapi.DUPLICATE_SAME_ACCESS)\n return Handle(h)\n \n \n def _find_w9xpopen(self):\n ''\n w9xpopen=os.path.join(\n os.path.dirname(_winapi.GetModuleFileName(0)),\n \"w9xpopen.exe\")\n if not os.path.exists(w9xpopen):\n \n \n w9xpopen=os.path.join(os.path.dirname(sys.base_exec_prefix),\n \"w9xpopen.exe\")\n if not os.path.exists(w9xpopen):\n raise RuntimeError(\"Cannot locate w9xpopen.exe, which is \"\n \"needed for Popen to work with your \"\n \"shell or platform.\")\n return w9xpopen\n \n \n def _execute_child(self,args,executable,preexec_fn,close_fds,\n pass_fds,cwd,env,\n startupinfo,creationflags,shell,\n p2cread,p2cwrite,\n c2pread,c2pwrite,\n errread,errwrite,\n unused_restore_signals,unused_start_new_session):\n ''\n \n assert not pass_fds,\"pass_fds not supported on Windows.\"\n \n if not isinstance(args,str):\n args=list2cmdline(args)\n \n \n if startupinfo is None :\n startupinfo=STARTUPINFO()\n if -1 not in (p2cread,c2pwrite,errwrite):\n startupinfo.dwFlags |=_winapi.STARTF_USESTDHANDLES\n startupinfo.hStdInput=p2cread\n startupinfo.hStdOutput=c2pwrite\n startupinfo.hStdError=errwrite\n \n if shell:\n startupinfo.dwFlags |=_winapi.STARTF_USESHOWWINDOW\n startupinfo.wShowWindow=_winapi.SW_HIDE\n comspec=os.environ.get(\"COMSPEC\",\"cmd.exe\")\n args='{} /c \"{}\"'.format(comspec,args)\n if (_winapi.GetVersion()>=0x80000000 or\n os.path.basename(comspec).lower()==\"command.com\"):\n \n \n \n \n w9xpopen=self._find_w9xpopen()\n args='\"%s\" %s'%(w9xpopen,args)\n \n \n \n \n \n \n creationflags |=_winapi.CREATE_NEW_CONSOLE\n \n \n try :\n hp,ht,pid,tid=_winapi.CreateProcess(executable,args,\n \n None ,None ,\n int(not close_fds),\n creationflags,\n env,\n cwd,\n startupinfo)\n except pywintypes.error as e:\n \n \n \n \n raise WindowsError(*e.args)\n finally :\n \n \n \n \n \n \n if p2cread !=-1:\n p2cread.Close()\n if c2pwrite !=-1:\n c2pwrite.Close()\n if errwrite !=-1:\n errwrite.Close()\n if hasattr(self,'_devnull'):\n os.close(self._devnull)\n \n \n self._child_created=True\n self._handle=Handle(hp)\n self.pid=pid\n _winapi.CloseHandle(ht)\n \n def _internal_poll(self,_deadstate=None ,\n _WaitForSingleObject=_winapi.WaitForSingleObject,\n _WAIT_OBJECT_0=_winapi.WAIT_OBJECT_0,\n _GetExitCodeProcess=_winapi.GetExitCodeProcess):\n ''\n\n\n\n\n\n \n if self.returncode is None :\n if _WaitForSingleObject(self._handle,0)==_WAIT_OBJECT_0:\n self.returncode=_GetExitCodeProcess(self._handle)\n return self.returncode\n \n \n def wait(self,timeout=None ,endtime=None ):\n ''\n \n if endtime is not None :\n timeout=self._remaining_time(endtime)\n if timeout is None :\n timeout_millis=_winapi.INFINITE\n else :\n timeout_millis=int(timeout *1000)\n if self.returncode is None :\n result=_winapi.WaitForSingleObject(self._handle,\n timeout_millis)\n if result ==_winapi.WAIT_TIMEOUT:\n raise 
TimeoutExpired(self.args,timeout)\n self.returncode=_winapi.GetExitCodeProcess(self._handle)\n return self.returncode\n \n \n def _readerthread(self,fh,buffer):\n buffer.append(fh.read())\n fh.close()\n \n \n def _communicate(self,input,endtime,orig_timeout):\n \n \n if self.stdout and not hasattr(self,\"_stdout_buff\"):\n self._stdout_buff=[]\n self.stdout_thread= threading.Thread(target=self._readerthread,\n args=(self.stdout,self._stdout_buff))\n self.stdout_thread.daemon=True\n self.stdout_thread.start()\n if self.stderr and not hasattr(self,\"_stderr_buff\"):\n self._stderr_buff=[]\n self.stderr_thread= threading.Thread(target=self._readerthread,\n args=(self.stderr,self._stderr_buff))\n self.stderr_thread.daemon=True\n self.stderr_thread.start()\n \n if self.stdin:\n if input is not None :\n try :\n self.stdin.write(input)\n except IOError as e:\n if e.errno !=errno.EPIPE:\n raise\n self.stdin.close()\n \n \n \n \n if self.stdout is not None :\n self.stdout_thread.join(self._remaining_time(endtime))\n if self.stdout_thread.is_alive():\n raise TimeoutExpired(self.args,orig_timeout)\n if self.stderr is not None :\n self.stderr_thread.join(self._remaining_time(endtime))\n if self.stderr_thread.is_alive():\n raise TimeoutExpired(self.args,orig_timeout)\n \n \n \n stdout=None\n stderr=None\n if self.stdout:\n stdout=self._stdout_buff\n self.stdout.close()\n if self.stderr:\n stderr=self._stderr_buff\n self.stderr.close()\n \n \n if stdout is not None :\n stdout=stdout[0]\n if stderr is not None :\n stderr=stderr[0]\n \n return (stdout,stderr)\n \n def send_signal(self,sig):\n ''\n \n if sig ==signal.SIGTERM:\n self.terminate()\n elif sig ==signal.CTRL_C_EVENT:\n os.kill(self.pid,signal.CTRL_C_EVENT)\n elif sig ==signal.CTRL_BREAK_EVENT:\n os.kill(self.pid,signal.CTRL_BREAK_EVENT)\n else :\n raise ValueError(\"Unsupported signal: {}\".format(sig))\n \n def terminate(self):\n ''\n \n try :\n _winapi.TerminateProcess(self._handle,1)\n except PermissionError:\n \n \n rc=_winapi.GetExitCodeProcess(self._handle)\n if rc ==_winapi.STILL_ACTIVE:\n raise\n self.returncode=rc\n \n kill=terminate\n \n else :\n \n \n \n def _get_handles(self,stdin,stdout,stderr):\n ''\n\n \n p2cread,p2cwrite=-1,-1\n c2pread,c2pwrite=-1,-1\n errread,errwrite=-1,-1\n \n if stdin is None :\n pass\n elif stdin ==PIPE:\n p2cread,p2cwrite=_create_pipe()\n elif stdin ==DEVNULL:\n p2cread=self._get_devnull()\n elif isinstance(stdin,int):\n p2cread=stdin\n else :\n \n p2cread=stdin.fileno()\n \n if stdout is None :\n pass\n elif stdout ==PIPE:\n c2pread,c2pwrite=_create_pipe()\n elif stdout ==DEVNULL:\n c2pwrite=self._get_devnull()\n elif isinstance(stdout,int):\n c2pwrite=stdout\n else :\n \n c2pwrite=stdout.fileno()\n \n if stderr is None :\n pass\n elif stderr ==PIPE:\n errread,errwrite=_create_pipe()\n elif stderr ==STDOUT:\n errwrite=c2pwrite\n elif stderr ==DEVNULL:\n errwrite=self._get_devnull()\n elif isinstance(stderr,int):\n errwrite=stderr\n else :\n \n errwrite=stderr.fileno()\n \n return (p2cread,p2cwrite,\n c2pread,c2pwrite,\n errread,errwrite)\n \n \n def _close_fds(self,fds_to_keep):\n start_fd=3\n for fd in sorted(fds_to_keep):\n if fd >=start_fd:\n os.closerange(start_fd,fd)\n start_fd=fd+1\n if start_fd <=MAXFD:\n os.closerange(start_fd,MAXFD)\n \n \n def _execute_child(self,args,executable,preexec_fn,close_fds,\n pass_fds,cwd,env,\n startupinfo,creationflags,shell,\n p2cread,p2cwrite,\n c2pread,c2pwrite,\n errread,errwrite,\n restore_signals,start_new_session):\n ''\n \n if isinstance(args,(str,bytes)):\n 
args=[args]\n else :\n args=list(args)\n \n if shell:\n args=[\"/bin/sh\",\"-c\"]+args\n if executable:\n args[0]=executable\n \n if executable is None :\n executable=args[0]\n orig_executable=executable\n \n \n \n \n errpipe_read,errpipe_write=_create_pipe()\n try :\n try :\n \n \n \n \n \n if env is not None :\n env_list=[os.fsencode(k)+b'='+os.fsencode(v)\n for k,v in env.items()]\n else :\n env_list=None\n executable=os.fsencode(executable)\n if os.path.dirname(executable):\n executable_list=(executable,)\n else :\n \n executable_list=tuple(\n os.path.join(os.fsencode(dir),executable)\n for dir in os.get_exec_path(env))\n fds_to_keep=set(pass_fds)\n fds_to_keep.add(errpipe_write)\n self.pid=_posixsubprocess.fork_exec(\n args,executable_list,\n close_fds,sorted(fds_to_keep),cwd,env_list,\n p2cread,p2cwrite,c2pread,c2pwrite,\n errread,errwrite,\n errpipe_read,errpipe_write,\n restore_signals,start_new_session,preexec_fn)\n self._child_created=True\n finally :\n \n os.close(errpipe_write)\n \n \n devnull_fd=getattr(self,'_devnull',None )\n if p2cread !=-1 and p2cwrite !=-1 and p2cread !=devnull_fd:\n os.close(p2cread)\n if c2pwrite !=-1 and c2pread !=-1 and c2pwrite !=devnull_fd:\n os.close(c2pwrite)\n if errwrite !=-1 and errread !=-1 and errwrite !=devnull_fd:\n os.close(errwrite)\n if devnull_fd is not None :\n os.close(devnull_fd)\n \n self._closed_child_pipe_fds=True\n \n \n \n errpipe_data=bytearray()\n while True :\n part=_eintr_retry_call(os.read,errpipe_read,50000)\n errpipe_data +=part\n if not part or len(errpipe_data)>50000:\n break\n finally :\n \n os.close(errpipe_read)\n \n if errpipe_data:\n try :\n _eintr_retry_call(os.waitpid,self.pid,0)\n except OSError as e:\n if e.errno !=errno.ECHILD:\n raise\n try :\n exception_name,hex_errno,err_msg=(\n errpipe_data.split(b':',2))\n except ValueError:\n exception_name=b'RuntimeError'\n hex_errno=b'0'\n err_msg=(b'Bad exception data from child: '+\n repr(errpipe_data))\n child_exception_type=getattr(\n builtins,exception_name.decode('ascii'),\n RuntimeError)\n err_msg=err_msg.decode(errors=\"surrogatepass\")\n if issubclass(child_exception_type,OSError)and hex_errno:\n errno_num=int(hex_errno,16)\n child_exec_never_called=(err_msg ==\"noexec\")\n if child_exec_never_called:\n err_msg=\"\"\n if errno_num !=0:\n err_msg=os.strerror(errno_num)\n if errno_num ==errno.ENOENT:\n if child_exec_never_called:\n \n err_msg +=': '+repr(cwd)\n else :\n err_msg +=': '+repr(orig_executable)\n raise child_exception_type(errno_num,err_msg)\n raise child_exception_type(err_msg)\n \n \n def _handle_exitstatus(self,sts,_WIFSIGNALED=os.WIFSIGNALED,\n _WTERMSIG=os.WTERMSIG,_WIFEXITED=os.WIFEXITED,\n _WEXITSTATUS=os.WEXITSTATUS):\n \n \n if _WIFSIGNALED(sts):\n self.returncode=-_WTERMSIG(sts)\n elif _WIFEXITED(sts):\n self.returncode=_WEXITSTATUS(sts)\n else :\n \n raise RuntimeError(\"Unknown child exit status!\")\n \n \n def _internal_poll(self,_deadstate=None ,_waitpid=os.waitpid,\n _WNOHANG=os.WNOHANG,_os_error=os.error,_ECHILD=errno.ECHILD):\n ''\n\n\n\n\n\n \n if self.returncode is None :\n try :\n pid,sts=_waitpid(self.pid,_WNOHANG)\n if pid ==self.pid:\n self._handle_exitstatus(sts)\n except _os_error as e:\n if _deadstate is not None :\n self.returncode=_deadstate\n elif e.errno ==_ECHILD:\n \n \n \n \n \n self.returncode=0\n return self.returncode\n \n \n def _try_wait(self,wait_flags):\n try :\n (pid,sts)=_eintr_retry_call(os.waitpid,self.pid,wait_flags)\n except OSError as e:\n if e.errno !=errno.ECHILD:\n raise\n \n \n \n pid=self.pid\n 
sts=0\n return (pid,sts)\n \n \n def wait(self,timeout=None ,endtime=None ):\n ''\n \n if self.returncode is not None :\n return self.returncode\n \n \n \n if endtime is not None or timeout is not None :\n if endtime is None :\n endtime=_time()+timeout\n elif timeout is None :\n timeout=self._remaining_time(endtime)\n \n if endtime is not None :\n \n \n delay=0.0005\n while True :\n (pid,sts)=self._try_wait(os.WNOHANG)\n assert pid ==self.pid or pid ==0\n if pid ==self.pid:\n self._handle_exitstatus(sts)\n break\n remaining=self._remaining_time(endtime)\n if remaining <=0:\n raise TimeoutExpired(self.args,timeout)\n delay=min(delay *2,remaining,.05)\n time.sleep(delay)\n else :\n while self.returncode is None :\n (pid,sts)=self._try_wait(0)\n \n \n if pid ==self.pid:\n self._handle_exitstatus(sts)\n return self.returncode\n \n \n def _communicate(self,input,endtime,orig_timeout):\n if self.stdin and not self._communication_started:\n \n \n self.stdin.flush()\n if not input:\n self.stdin.close()\n \n if _has_poll:\n stdout,stderr=self._communicate_with_poll(input,endtime,\n orig_timeout)\n else :\n stdout,stderr=self._communicate_with_select(input,endtime,\n orig_timeout)\n \n self.wait(timeout=self._remaining_time(endtime))\n \n \n if stdout is not None :\n stdout=b''.join(stdout)\n if stderr is not None :\n stderr=b''.join(stderr)\n \n \n \n if self.universal_newlines:\n if stdout is not None :\n stdout=self._translate_newlines(stdout,\n self.stdout.encoding)\n if stderr is not None :\n stderr=self._translate_newlines(stderr,\n self.stderr.encoding)\n \n return (stdout,stderr)\n \n \n def _save_input(self,input):\n \n \n \n if self.stdin and self._input is None :\n self._input_offset=0\n self._input=input\n if self.universal_newlines and input is not None :\n self._input=self._input.encode(self.stdin.encoding)\n \n \n def _communicate_with_poll(self,input,endtime,orig_timeout):\n stdout=None\n stderr=None\n \n if not self._communication_started:\n self._fd2file={}\n \n poller=select.poll()\n def register_and_append(file_obj,eventmask):\n poller.register(file_obj.fileno(),eventmask)\n self._fd2file[file_obj.fileno()]=file_obj\n \n def close_unregister_and_remove(fd):\n poller.unregister(fd)\n self._fd2file[fd].close()\n self._fd2file.pop(fd)\n \n if self.stdin and input:\n register_and_append(self.stdin,select.POLLOUT)\n \n \n if not self._communication_started:\n self._fd2output={}\n if self.stdout:\n self._fd2output[self.stdout.fileno()]=[]\n if self.stderr:\n self._fd2output[self.stderr.fileno()]=[]\n \n select_POLLIN_POLLPRI=select.POLLIN |select.POLLPRI\n if self.stdout:\n register_and_append(self.stdout,select_POLLIN_POLLPRI)\n stdout=self._fd2output[self.stdout.fileno()]\n if self.stderr:\n register_and_append(self.stderr,select_POLLIN_POLLPRI)\n stderr=self._fd2output[self.stderr.fileno()]\n \n self._save_input(input)\n \n while self._fd2file:\n timeout=self._remaining_time(endtime)\n if timeout is not None and timeout <0:\n raise TimeoutExpired(self.args,orig_timeout)\n try :\n ready=poller.poll(timeout)\n except select.error as e:\n if e.args[0]==errno.EINTR:\n continue\n raise\n self._check_timeout(endtime,orig_timeout)\n \n \n \n \n for fd,mode in ready:\n if mode&select.POLLOUT:\n chunk=self._input[self._input_offset:\n self._input_offset+_PIPE_BUF]\n try :\n self._input_offset +=os.write(fd,chunk)\n except OSError as e:\n if e.errno ==errno.EPIPE:\n close_unregister_and_remove(fd)\n else :\n raise\n else :\n if self._input_offset >=len(self._input):\n 
close_unregister_and_remove(fd)\n elif mode&select_POLLIN_POLLPRI:\n data=os.read(fd,4096)\n if not data:\n close_unregister_and_remove(fd)\n self._fd2output[fd].append(data)\n else :\n \n close_unregister_and_remove(fd)\n \n return (stdout,stderr)\n \n \n def _communicate_with_select(self,input,endtime,orig_timeout):\n if not self._communication_started:\n self._read_set=[]\n self._write_set=[]\n if self.stdin and input:\n self._write_set.append(self.stdin)\n if self.stdout:\n self._read_set.append(self.stdout)\n if self.stderr:\n self._read_set.append(self.stderr)\n \n self._save_input(input)\n \n stdout=None\n stderr=None\n \n if self.stdout:\n if not self._communication_started:\n self._stdout_buff=[]\n stdout=self._stdout_buff\n if self.stderr:\n if not self._communication_started:\n self._stderr_buff=[]\n stderr=self._stderr_buff\n \n while self._read_set or self._write_set:\n timeout=self._remaining_time(endtime)\n if timeout is not None and timeout <0:\n raise TimeoutExpired(self.args,orig_timeout)\n try :\n (rlist,wlist,xlist)= select.select(self._read_set,self._write_set,[],\n timeout)\n except select.error as e:\n if e.args[0]==errno.EINTR:\n continue\n raise\n \n \n \n if not (rlist or wlist or xlist):\n raise TimeoutExpired(self.args,orig_timeout)\n \n self._check_timeout(endtime,orig_timeout)\n \n \n \n \n if self.stdin in wlist:\n chunk=self._input[self._input_offset:\n self._input_offset+_PIPE_BUF]\n try :\n bytes_written=os.write(self.stdin.fileno(),chunk)\n except OSError as e:\n if e.errno ==errno.EPIPE:\n self.stdin.close()\n self._write_set.remove(self.stdin)\n else :\n raise\n else :\n self._input_offset +=bytes_written\n if self._input_offset >=len(self._input):\n self.stdin.close()\n self._write_set.remove(self.stdin)\n \n if self.stdout in rlist:\n data=os.read(self.stdout.fileno(),1024)\n if not data:\n self.stdout.close()\n self._read_set.remove(self.stdout)\n stdout.append(data)\n \n if self.stderr in rlist:\n data=os.read(self.stderr.fileno(),1024)\n if not data:\n self.stderr.close()\n self._read_set.remove(self.stderr)\n stderr.append(data)\n \n return (stdout,stderr)\n \n \n def send_signal(self,sig):\n ''\n \n os.kill(self.pid,sig)\n \n def terminate(self):\n ''\n \n self.send_signal(signal.SIGTERM)\n \n def kill(self):\n ''\n \n self.send_signal(signal.SIGKILL)\n"], "_string": [".py", "''\n\nimport re\n\nclass __loader__(object):\n pass\n \ndef formatter_field_name_split(fieldname):\n ''\n _list=[]\n for _name in fieldname:\n _parts=_name.split('.')\n for _item in _parts:\n is_attr=False\n if re.match('\\d+',_item):\n _list.append((int(_item),is_attr))\n else :\n _list.append((_item,is_attr))\n \n return _list[0][0],iter(_list[1:])\n \ndef formatter_parser(*args,**kw):\n ''\n \n assert len(args)==1\n assert isinstance(args[0],str)\n \n _result=[]\n for _match in re.finditer(\"([^{]*)?(\\{[^}]*\\})?\",args[0]):\n _pre,_fmt=_match.groups()\n if _fmt is None :\n _result.append((_pre,None ,None ,None ))\n elif _fmt =='{}':\n _result.append((_pre,'','',None ))\n else :\n _m=re.match(\"\\{([^!]*)!?(.*)?\\}\",_fmt)\n _name=_m.groups(0)\n _flags=_m.groups(1)\n \n _result.append((_pre,_name,_flags,None ))\n \n return _result\n"], "encodings.ptcp154": [".py", "''\n\n\n\n\n\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_table)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass 
IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False ):\n return codecs.charmap_encode(input,self.errors,encoding_table)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False ):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='ptcp154',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n \n \ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' '\n'!'\n'\"'\n'#'\n'$'\n'%'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\u0496'\n'\\u0492'\n'\\u04ee'\n'\\u0493'\n'\\u201e'\n'\\u2026'\n'\\u04b6'\n'\\u04ae'\n'\\u04b2'\n'\\u04af'\n'\\u04a0'\n'\\u04e2'\n'\\u04a2'\n'\\u049a'\n'\\u04ba'\n'\\u04b8'\n'\\u0497'\n'\\u2018'\n'\\u2019'\n'\\u201c'\n'\\u201d'\n'\\u2022'\n'\\u2013'\n'\\u2014'\n'\\u04b3'\n'\\u04b7'\n'\\u04a1'\n'\\u04e3'\n'\\u04a3'\n'\\u049b'\n'\\u04bb'\n'\\u04b9'\n'\\xa0'\n'\\u040e'\n'\\u045e'\n'\\u0408'\n'\\u04e8'\n'\\u0498'\n'\\u04b0'\n'\\xa7'\n'\\u0401'\n'\\xa9'\n'\\u04d8'\n'\\xab'\n'\\xac'\n'\\u04ef'\n'\\xae'\n'\\u049c'\n'\\xb0'\n'\\u04b1'\n'\\u0406'\n'\\u0456'\n'\\u0499'\n'\\u04e9'\n'\\xb6'\n'\\xb7'\n'\\u0451'\n'\\u2116'\n'\\u04d9'\n'\\xbb'\n'\\u0458'\n'\\u04aa'\n'\\u04ab'\n'\\u049d'\n'\\u0410'\n'\\u0411'\n'\\u0412'\n'\\u0413'\n'\\u0414'\n'\\u0415'\n'\\u0416'\n'\\u0417'\n'\\u0418'\n'\\u0419'\n'\\u041a'\n'\\u041b'\n'\\u041c'\n'\\u041d'\n'\\u041e'\n'\\u041f'\n'\\u0420'\n'\\u0421'\n'\\u0422'\n'\\u0423'\n'\\u0424'\n'\\u0425'\n'\\u0426'\n'\\u0427'\n'\\u0428'\n'\\u0429'\n'\\u042a'\n'\\u042b'\n'\\u042c'\n'\\u042d'\n'\\u042e'\n'\\u042f'\n'\\u0430'\n'\\u0431'\n'\\u0432'\n'\\u0433'\n'\\u0434'\n'\\u0435'\n'\\u0436'\n'\\u0437'\n'\\u0438'\n'\\u0439'\n'\\u043a'\n'\\u043b'\n'\\u043c'\n'\\u043d'\n'\\u043e'\n'\\u043f'\n'\\u0440'\n'\\u0441'\n'\\u0442'\n'\\u0443'\n'\\u0444'\n'\\u0445'\n'\\u0446'\n'\\u0447'\n'\\u0448'\n'\\u0449'\n'\\u044a'\n'\\u044b'\n'\\u044c'\n'\\u044d'\n'\\u044e'\n'\\u044f'\n)\n\n\nencoding_table=codecs.charmap_build(decoding_table)\n"], "codecs": [".py", "''\n\n\n\n\n\n\n\n\nimport builtins,sys\n\n\n\ntry :\n from _codecs import *\nexcept ImportError as why:\n raise SystemError('Failed to load the builtin codecs: %s'%why)\n 
\n__all__=[\"register\",\"lookup\",\"open\",\"EncodedFile\",\"BOM\",\"BOM_BE\",\n\"BOM_LE\",\"BOM32_BE\",\"BOM32_LE\",\"BOM64_BE\",\"BOM64_LE\",\n\"BOM_UTF8\",\"BOM_UTF16\",\"BOM_UTF16_LE\",\"BOM_UTF16_BE\",\n\"BOM_UTF32\",\"BOM_UTF32_LE\",\"BOM_UTF32_BE\",\n\"strict_errors\",\"ignore_errors\",\"replace_errors\",\n\"xmlcharrefreplace_errors\",\n\"register_error\",\"lookup_error\"]\n\n\n\n\n\n\n\n\n\n\nBOM_UTF8=b'\\xef\\xbb\\xbf'\n\n\nBOM_LE=BOM_UTF16_LE=b'\\xff\\xfe'\n\n\nBOM_BE=BOM_UTF16_BE=b'\\xfe\\xff'\n\n\nBOM_UTF32_LE=b'\\xff\\xfe\\x00\\x00'\n\n\nBOM_UTF32_BE=b'\\x00\\x00\\xfe\\xff'\n\nif sys.byteorder =='little':\n\n\n BOM=BOM_UTF16=BOM_UTF16_LE\n \n \n BOM_UTF32=BOM_UTF32_LE\n \nelse :\n\n\n BOM=BOM_UTF16=BOM_UTF16_BE\n \n \n BOM_UTF32=BOM_UTF32_BE\n \n \nBOM32_LE=BOM_UTF16_LE\nBOM32_BE=BOM_UTF16_BE\nBOM64_LE=BOM_UTF32_LE\nBOM64_BE=BOM_UTF32_BE\n\n\n\n\nclass CodecInfo(tuple):\n\n def __new__(cls,encode,decode,streamreader=None ,streamwriter=None ,\n incrementalencoder=None ,incrementaldecoder=None ,name=None ):\n self=tuple.__new__(cls,(encode,decode,streamreader,streamwriter))\n self.name=name\n self.encode=encode\n self.decode=decode\n self.incrementalencoder=incrementalencoder\n self.incrementaldecoder=incrementaldecoder\n self.streamwriter=streamwriter\n self.streamreader=streamreader\n return self\n \n def __repr__(self):\n return\"<%s.%s object for encoding %s at 0x%x>\"% (self.__class__.__module__,self.__class__.__name__,\n self.name,id(self))\n \nclass Codec:\n\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n def encode(self,input,errors='strict'):\n \n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n raise NotImplementedError\n \n def decode(self,input,errors='strict'):\n \n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n raise NotImplementedError\n \nclass IncrementalEncoder(object):\n ''\n\n\n\n \n def __init__(self,errors='strict'):\n ''\n\n\n\n\n\n \n self.errors=errors\n self.buffer=\"\"\n \n def encode(self,input,final=False ):\n ''\n\n \n raise NotImplementedError\n \n def reset(self):\n ''\n\n \n \n def getstate(self):\n ''\n\n \n return 0\n \n def setstate(self,state):\n ''\n\n\n \n \nclass BufferedIncrementalEncoder(IncrementalEncoder):\n ''\n\n\n\n \n def __init__(self,errors='strict'):\n IncrementalEncoder.__init__(self,errors)\n \n self.buffer=\"\"\n \n def _buffer_encode(self,input,errors,final):\n \n \n raise NotImplementedError\n \n def encode(self,input,final=False ):\n \n data=self.buffer+input\n (result,consumed)=self._buffer_encode(data,self.errors,final)\n \n self.buffer=data[consumed:]\n return result\n \n def reset(self):\n IncrementalEncoder.reset(self)\n self.buffer=\"\"\n \n def getstate(self):\n return self.buffer or 0\n \n def setstate(self,state):\n self.buffer=state or\"\"\n \nclass IncrementalDecoder(object):\n ''\n\n\n\n \n def __init__(self,errors='strict'):\n ''\n\n\n\n\n\n \n self.errors=errors\n \n def decode(self,input,final=False ):\n ''\n\n \n raise NotImplementedError\n \n def reset(self):\n ''\n\n \n \n def getstate(self):\n ''\n\n\n\n\n\n\n\n\n\n \n return (b\"\",0)\n \n def setstate(self,state):\n ''\n\n\n\n\n \n \nclass BufferedIncrementalDecoder(IncrementalDecoder):\n ''\n\n\n\n \n def __init__(self,errors='strict'):\n IncrementalDecoder.__init__(self,errors)\n \n self.buffer=b\"\"\n \n def _buffer_decode(self,input,errors,final):\n \n \n raise NotImplementedError\n \n def decode(self,input,final=False ):\n \n data=self.buffer+input\n (result,consumed)=self._buffer_decode(data,self.errors,final)\n \n self.buffer=data[consumed:]\n return result\n \n 
def reset(self):\n IncrementalDecoder.reset(self)\n self.buffer=b\"\"\n \n def getstate(self):\n \n return (self.buffer,0)\n \n def setstate(self,state):\n \n self.buffer=state[0]\n \n \n \n \n \n \n \n \nclass StreamWriter(Codec):\n\n def __init__(self,stream,errors='strict'):\n \n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n self.stream=stream\n self.errors=errors\n \n def write(self,object):\n \n ''\n \n data,consumed=self.encode(object,self.errors)\n self.stream.write(data)\n \n def writelines(self,list):\n \n ''\n\n \n self.write(''.join(list))\n \n def reset(self):\n \n ''\n\n\n\n\n\n\n \n pass\n \n def seek(self,offset,whence=0):\n self.stream.seek(offset,whence)\n if whence ==0 and offset ==0:\n self.reset()\n \n def __getattr__(self,name,\n getattr=getattr):\n \n ''\n \n return getattr(self.stream,name)\n \n def __enter__(self):\n return self\n \n def __exit__(self,type,value,tb):\n self.stream.close()\n \n \n \nclass StreamReader(Codec):\n\n charbuffertype=str\n \n def __init__(self,stream,errors='strict'):\n \n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n self.stream=stream\n self.errors=errors\n self.bytebuffer=b\"\"\n self._empty_charbuffer=self.charbuffertype()\n self.charbuffer=self._empty_charbuffer\n self.linebuffer=None\n \n def decode(self,input,errors='strict'):\n raise NotImplementedError\n \n def read(self,size=-1,chars=-1,firstline=False ):\n \n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n if self.linebuffer:\n self.charbuffer=self._empty_charbuffer.join(self.linebuffer)\n self.linebuffer=None\n \n \n while True :\n \n if chars <0:\n if size <0:\n if self.charbuffer:\n break\n elif len(self.charbuffer)>=size:\n break\n else :\n if len(self.charbuffer)>=chars:\n break\n \n if size <0:\n newdata=self.stream.read()\n else :\n newdata=self.stream.read(size)\n \n data=self.bytebuffer+newdata\n try :\n newchars,decodedbytes=self.decode(data,self.errors)\n except UnicodeDecodeError as exc:\n if firstline:\n newchars,decodedbytes= self.decode(data[:exc.start],self.errors)\n lines=newchars.splitlines(keepends=True )\n if len(lines)<=1:\n raise\n else :\n raise\n \n self.bytebuffer=data[decodedbytes:]\n \n self.charbuffer +=newchars\n \n if not newdata:\n break\n if chars <0:\n \n result=self.charbuffer\n self.charbuffer=self._empty_charbuffer\n else :\n \n result=self.charbuffer[:chars]\n self.charbuffer=self.charbuffer[chars:]\n return result\n \n def readline(self,size=None ,keepends=True ):\n \n ''\n\n\n\n\n\n \n \n \n if self.linebuffer:\n line=self.linebuffer[0]\n del self.linebuffer[0]\n if len(self.linebuffer)==1:\n \n \n self.charbuffer=self.linebuffer[0]\n self.linebuffer=None\n if not keepends:\n line=line.splitlines(keepends=False )[0]\n return line\n \n readsize=size or 72\n line=self._empty_charbuffer\n \n while True :\n data=self.read(readsize,firstline=True )\n if data:\n \n \n \n if (isinstance(data,str)and data.endswith(\"\\r\"))or (isinstance(data,bytes)and data.endswith(b\"\\r\")):\n data +=self.read(size=1,chars=1)\n \n line +=data\n lines=line.splitlines(keepends=True )\n if lines:\n if len(lines)>1:\n \n \n line=lines[0]\n del lines[0]\n if len(lines)>1:\n \n lines[-1]+=self.charbuffer\n self.linebuffer=lines\n self.charbuffer=None\n else :\n \n self.charbuffer=lines[0]+self.charbuffer\n if not keepends:\n line=line.splitlines(keepends=False )[0]\n break\n line0withend=lines[0]\n line0withoutend=lines[0].splitlines(keepends=False )[0]\n if line0withend !=line0withoutend:\n \n self.charbuffer=self._empty_charbuffer.join(lines[1:])+ self.charbuffer\n 
if keepends:\n line=line0withend\n else :\n line=line0withoutend\n break\n \n if not data or size is not None :\n if line and not keepends:\n line=line.splitlines(keepends=False )[0]\n break\n if readsize <8000:\n readsize *=2\n return line\n \n def readlines(self,sizehint=None ,keepends=True ):\n \n ''\n\n\n\n\n\n\n\n\n \n data=self.read()\n return data.splitlines(keepends)\n \n def reset(self):\n \n ''\n\n\n\n\n\n \n self.bytebuffer=b\"\"\n self.charbuffer=self._empty_charbuffer\n self.linebuffer=None\n \n def seek(self,offset,whence=0):\n ''\n\n\n \n self.stream.seek(offset,whence)\n self.reset()\n \n def __next__(self):\n \n ''\n line=self.readline()\n if line:\n return line\n raise StopIteration\n \n def __iter__(self):\n return self\n \n def __getattr__(self,name,\n getattr=getattr):\n \n ''\n \n return getattr(self.stream,name)\n \n def __enter__(self):\n return self\n \n def __exit__(self,type,value,tb):\n self.stream.close()\n \n \n \nclass StreamReaderWriter:\n\n ''\n\n\n\n\n\n\n \n \n encoding='unknown'\n \n def __init__(self,stream,Reader,Writer,errors='strict'):\n \n ''\n\n\n\n\n\n\n\n\n\n \n self.stream=stream\n self.reader=Reader(stream,errors)\n self.writer=Writer(stream,errors)\n self.errors=errors\n \n def read(self,size=-1):\n \n return self.reader.read(size)\n \n def readline(self,size=None ):\n \n return self.reader.readline(size)\n \n def readlines(self,sizehint=None ):\n \n return self.reader.readlines(sizehint)\n \n def __next__(self):\n \n ''\n return next(self.reader)\n \n def __iter__(self):\n return self\n \n def write(self,data):\n \n return self.writer.write(data)\n \n def writelines(self,list):\n \n return self.writer.writelines(list)\n \n def reset(self):\n \n self.reader.reset()\n self.writer.reset()\n \n def seek(self,offset,whence=0):\n self.stream.seek(offset,whence)\n self.reader.reset()\n if whence ==0 and offset ==0:\n self.writer.reset()\n \n def __getattr__(self,name,\n getattr=getattr):\n \n ''\n \n return getattr(self.stream,name)\n \n \n \n def __enter__(self):\n return self\n \n def __exit__(self,type,value,tb):\n self.stream.close()\n \n \n \nclass StreamRecoder:\n\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n data_encoding='unknown'\n file_encoding='unknown'\n \n def __init__(self,stream,encode,decode,Reader,Writer,\n errors='strict'):\n \n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n self.stream=stream\n self.encode=encode\n self.decode=decode\n self.reader=Reader(stream,errors)\n self.writer=Writer(stream,errors)\n self.errors=errors\n \n def read(self,size=-1):\n \n data=self.reader.read(size)\n data,bytesencoded=self.encode(data,self.errors)\n return data\n \n def readline(self,size=None ):\n \n if size is None :\n data=self.reader.readline()\n else :\n data=self.reader.readline(size)\n data,bytesencoded=self.encode(data,self.errors)\n return data\n \n def readlines(self,sizehint=None ):\n \n data=self.reader.read()\n data,bytesencoded=self.encode(data,self.errors)\n return data.splitlines(keepends=True )\n \n def __next__(self):\n \n ''\n data=next(self.reader)\n data,bytesencoded=self.encode(data,self.errors)\n return data\n \n def __iter__(self):\n return self\n \n def write(self,data):\n \n data,bytesdecoded=self.decode(data,self.errors)\n return self.writer.write(data)\n \n def writelines(self,list):\n \n data=''.join(list)\n data,bytesdecoded=self.decode(data,self.errors)\n return self.writer.write(data)\n \n def reset(self):\n \n self.reader.reset()\n self.writer.reset()\n \n def __getattr__(self,name,\n getattr=getattr):\n \n ''\n \n 
return getattr(self.stream,name)\n \n def __enter__(self):\n return self\n \n def __exit__(self,type,value,tb):\n self.stream.close()\n \n \n \ndef open(filename,mode='rb',encoding=None ,errors='strict',buffering=1):\n\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if encoding is not None and 'b'not in mode:\n \n mode=mode+'b'\n file=builtins.open(filename,mode,buffering)\n if encoding is None :\n return file\n info=lookup(encoding)\n srw=StreamReaderWriter(file,info.streamreader,info.streamwriter,errors)\n \n srw.encoding=encoding\n return srw\n \ndef EncodedFile(file,data_encoding,file_encoding=None ,errors='strict'):\n\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if file_encoding is None :\n file_encoding=data_encoding\n data_info=lookup(data_encoding)\n file_info=lookup(file_encoding)\n sr=StreamRecoder(file,data_info.encode,data_info.decode,\n file_info.streamreader,file_info.streamwriter,errors)\n \n sr.data_encoding=data_encoding\n sr.file_encoding=file_encoding\n return sr\n \n \n \ndef getencoder(encoding):\n\n ''\n\n\n\n\n \n return lookup(encoding).encode\n \ndef getdecoder(encoding):\n\n ''\n\n\n\n\n \n return lookup(encoding).decode\n \ndef getincrementalencoder(encoding):\n\n ''\n\n\n\n\n\n \n encoder=lookup(encoding).incrementalencoder\n if encoder is None :\n raise LookupError(encoding)\n return encoder\n \ndef getincrementaldecoder(encoding):\n\n ''\n\n\n\n\n\n \n decoder=lookup(encoding).incrementaldecoder\n if decoder is None :\n raise LookupError(encoding)\n return decoder\n \ndef getreader(encoding):\n\n ''\n\n\n\n\n \n return lookup(encoding).streamreader\n \ndef getwriter(encoding):\n\n ''\n\n\n\n\n \n return lookup(encoding).streamwriter\n \ndef iterencode(iterator,encoding,errors='strict',**kwargs):\n ''\n\n\n\n\n\n\n \n encoder=getincrementalencoder(encoding)(errors,**kwargs)\n for input in iterator:\n output=encoder.encode(input)\n if output:\n yield output\n output=encoder.encode(\"\",True )\n if output:\n yield output\n \ndef iterdecode(iterator,encoding,errors='strict',**kwargs):\n ''\n\n\n\n\n\n\n \n decoder=getincrementaldecoder(encoding)(errors,**kwargs)\n for input in iterator:\n output=decoder.decode(input)\n if output:\n yield output\n output=decoder.decode(b\"\",True )\n if output:\n yield output\n \n \n \ndef make_identity_dict(rng):\n\n ''\n\n\n\n\n \n return {i:i for i in rng}\n \ndef make_encoding_map(decoding_map):\n\n ''\n\n\n\n\n\n\n\n\n\n \n m={}\n for k,v in decoding_map.items():\n if not v in m:\n m[v]=k\n else :\n m[v]=None\n return m\n \n \n \ntry :\n strict_errors=lookup_error(\"strict\")\n ignore_errors=lookup_error(\"ignore\")\n replace_errors=lookup_error(\"replace\")\n xmlcharrefreplace_errors=lookup_error(\"xmlcharrefreplace\")\n backslashreplace_errors=lookup_error(\"backslashreplace\")\nexcept LookupError:\n\n strict_errors=None\n ignore_errors=None\n replace_errors=None\n xmlcharrefreplace_errors=None\n backslashreplace_errors=None\n \n \n \n_false=0\nif _false:\n import encodings\n \n \n \nif __name__ =='__main__':\n\n\n sys.stdout=EncodedFile(sys.stdout,'latin-1','utf-8')\n \n \n sys.stdin=EncodedFile(sys.stdin,'utf-8','latin-1')\n"], "code": [".py", "''\n\n\n\n\n\n\nimport sys\nimport traceback\nfrom codeop import CommandCompiler,compile_command\n\n__all__=[\"InteractiveInterpreter\",\"InteractiveConsole\",\"interact\",\n\"compile_command\"]\n\nclass InteractiveInterpreter:\n ''\n\n\n\n\n\n \n \n def __init__(self,locals=None ):\n ''\n\n\n\n\n\n\n \n if locals is None :\n 
locals={\"__name__\":\"__console__\",\"__doc__\":None }\n self.locals=locals\n self.compile=CommandCompiler()\n \n def runsource(self,source,filename=\"\",symbol=\"single\"):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n try :\n code=self.compile(source,filename,symbol)\n except (OverflowError,SyntaxError,ValueError):\n \n self.showsyntaxerror(filename)\n return False\n \n if code is None :\n \n return True\n \n \n self.runcode(code)\n return False\n \n def runcode(self,code):\n ''\n\n\n\n\n\n\n\n\n\n \n try :\n exec(code,self.locals)\n except SystemExit:\n raise\n except :\n self.showtraceback()\n \n def showsyntaxerror(self,filename=None ):\n ''\n\n\n\n\n\n\n\n\n\n \n type,value,tb=sys.exc_info()\n sys.last_type=type\n sys.last_value=value\n sys.last_traceback=tb\n if filename and type is SyntaxError:\n \n try :\n msg,(dummy_filename,lineno,offset,line)=value.args\n except ValueError:\n \n pass\n else :\n \n value=SyntaxError(msg,(filename,lineno,offset,line))\n sys.last_value=value\n if sys.excepthook is sys.__excepthook__:\n lines=traceback.format_exception_only(type,value)\n self.write(''.join(lines))\n else :\n \n \n sys.excepthook(type,value,tb)\n \n def showtraceback(self):\n ''\n\n\n\n\n\n \n try :\n type,value,tb=sys.exc_info()\n sys.last_type=type\n sys.last_value=value\n sys.last_traceback=tb\n tblist=traceback.extract_tb(tb)\n del tblist[:1]\n lines=traceback.format_list(tblist)\n if lines:\n lines.insert(0,\"Traceback (most recent call last):\\n\")\n lines.extend(traceback.format_exception_only(type,value))\n finally :\n tblist=tb=None\n if sys.excepthook is sys.__excepthook__:\n self.write(''.join(lines))\n else :\n \n \n sys.excepthook(type,value,tb)\n \n def write(self,data):\n ''\n\n\n\n\n \n sys.stderr.write(data)\n \n \nclass InteractiveConsole(InteractiveInterpreter):\n ''\n\n\n\n\n \n \n def __init__(self,locals=None ,filename=\"\"):\n ''\n\n\n\n\n\n\n\n \n InteractiveInterpreter.__init__(self,locals)\n self.filename=filename\n self.resetbuffer()\n \n def resetbuffer(self):\n ''\n self.buffer=[]\n \n def interact(self,banner=None ):\n ''\n\n\n\n\n\n\n\n\n \n try :\n sys.ps1\n except AttributeError:\n sys.ps1=\">>> \"\n try :\n sys.ps2\n except AttributeError:\n sys.ps2=\"... 
\"\n cprt='Type \"help\", \"copyright\", \"credits\" or \"license\" for more information.'\n if banner is None :\n self.write(\"Python %s on %s\\n%s\\n(%s)\\n\"%\n (sys.version,sys.platform,cprt,\n self.__class__.__name__))\n elif banner:\n self.write(\"%s\\n\"%str(banner))\n more=0\n while 1:\n try :\n if more:\n prompt=sys.ps2\n else :\n prompt=sys.ps1\n try :\n line=self.raw_input(prompt)\n except EOFError:\n self.write(\"\\n\")\n break\n else :\n more=self.push(line)\n except KeyboardInterrupt:\n self.write(\"\\nKeyboardInterrupt\\n\")\n self.resetbuffer()\n more=0\n \n def push(self,line):\n ''\n\n\n\n\n\n\n\n\n\n\n\n \n self.buffer.append(line)\n source=\"\\n\".join(self.buffer)\n more=self.runsource(source,self.filename)\n if not more:\n self.resetbuffer()\n return more\n \n def raw_input(self,prompt=\"\"):\n ''\n\n\n\n\n\n\n\n\n \n return input(prompt)\n \n \n \ndef interact(banner=None ,readfunc=None ,local=None ):\n ''\n\n\n\n\n\n\n\n\n\n\n\n \n console=InteractiveConsole(local)\n if readfunc is not None :\n console.raw_input=readfunc\n else :\n try :\n import readline\n except ImportError:\n pass\n console.interact(banner)\n \n \nif __name__ ==\"__main__\":\n interact()\n"], "browser.timer": [".py", "from browser import window\n\ndef wrap(func):\n\n\n def f(*args,**kw):\n try :\n return func(*args,**kw)\n except Exception as exc:\n msg='{0.info}\\n{0.__name__}: {0.args[0]}'.format(exc)\n import sys\n sys.stderr.write(msg)\n return f\n \nclear_interval=window.clearInterval\n\nclear_timeout=window.clearTimeout\n\ndef set_interval(func,interval):\n return window.setInterval(wrap(func),interval)\n \ndef set_timeout(func,interval):\n return int(window.setTimeout(wrap(func),interval))\n \ndef request_animation_frame(func):\n return int(window.requestAnimationFrame(func))\n \ndef cancel_animation_frame(int_id):\n window.cancelAnimationFrame(int_id)\n \ndef set_loop_timeout(x):\n\n assert isinstance(x,int)\n __BRYTHON__.loop_timeout=x"], "asyncio.windows_utils": [".py", "''\n\n\n\nimport sys\n\nif sys.platform !='win32':\n raise ImportError('win32 only')\n \nimport _winapi\nimport itertools\nimport msvcrt\nimport os\nimport socket\nimport subprocess\nimport tempfile\nimport warnings\n\n\n__all__=['socketpair','pipe','Popen','PIPE','PipeHandle']\n\n\n\n\n\nBUFSIZE=8192\nPIPE=subprocess.PIPE\nSTDOUT=subprocess.STDOUT\n_mmap_counter=itertools.count()\n\n\nif hasattr(socket,'socketpair'):\n\n socketpair=socket.socketpair\nelse :\n\n def socketpair(family=socket.AF_INET,type=socket.SOCK_STREAM,proto=0):\n ''\n\n\n\n \n if family ==socket.AF_INET:\n host='127.0.0.1'\n elif family ==socket.AF_INET6:\n host='::1'\n else :\n raise ValueError(\"Only AF_INET and AF_INET6 socket address \"\n \"families are supported\")\n if type !=socket.SOCK_STREAM:\n raise ValueError(\"Only SOCK_STREAM socket type is supported\")\n if proto !=0:\n raise ValueError(\"Only protocol zero is supported\")\n \n \n \n lsock=socket.socket(family,type,proto)\n try :\n lsock.bind((host,0))\n lsock.listen(1)\n \n addr,port=lsock.getsockname()[:2]\n csock=socket.socket(family,type,proto)\n try :\n csock.setblocking(False )\n try :\n csock.connect((addr,port))\n except (BlockingIOError,InterruptedError):\n pass\n csock.setblocking(True )\n ssock,_=lsock.accept()\n except :\n csock.close()\n raise\n finally :\n lsock.close()\n return (ssock,csock)\n \n \n \n \n \ndef pipe(*,duplex=False ,overlapped=(True ,True ),bufsize=BUFSIZE):\n ''\n address=tempfile.mktemp(prefix=r'\\\\.\\pipe\\python-pipe-%d-%d-'%\n 
(os.getpid(),next(_mmap_counter)))\n \n if duplex:\n openmode=_winapi.PIPE_ACCESS_DUPLEX\n access=_winapi.GENERIC_READ |_winapi.GENERIC_WRITE\n obsize,ibsize=bufsize,bufsize\n else :\n openmode=_winapi.PIPE_ACCESS_INBOUND\n access=_winapi.GENERIC_WRITE\n obsize,ibsize=0,bufsize\n \n openmode |=_winapi.FILE_FLAG_FIRST_PIPE_INSTANCE\n \n if overlapped[0]:\n openmode |=_winapi.FILE_FLAG_OVERLAPPED\n \n if overlapped[1]:\n flags_and_attribs=_winapi.FILE_FLAG_OVERLAPPED\n else :\n flags_and_attribs=0\n \n h1=h2=None\n try :\n h1=_winapi.CreateNamedPipe(\n address,openmode,_winapi.PIPE_WAIT,\n 1,obsize,ibsize,_winapi.NMPWAIT_WAIT_FOREVER,_winapi.NULL)\n \n h2=_winapi.CreateFile(\n address,access,0,_winapi.NULL,_winapi.OPEN_EXISTING,\n flags_and_attribs,_winapi.NULL)\n \n ov=_winapi.ConnectNamedPipe(h1,overlapped=True )\n ov.GetOverlappedResult(True )\n return h1,h2\n except :\n if h1 is not None :\n _winapi.CloseHandle(h1)\n if h2 is not None :\n _winapi.CloseHandle(h2)\n raise\n \n \n \n \n \nclass PipeHandle:\n ''\n\n\n \n def __init__(self,handle):\n self._handle=handle\n \n def __repr__(self):\n if self._handle is not None :\n handle='handle=%r'%self._handle\n else :\n handle='closed'\n return'<%s %s>'%(self.__class__.__name__,handle)\n \n @property\n def handle(self):\n return self._handle\n \n def fileno(self):\n if self._handle is None :\n raise ValueError(\"I/O operatioon on closed pipe\")\n return self._handle\n \n def close(self,*,CloseHandle=_winapi.CloseHandle):\n if self._handle is not None :\n CloseHandle(self._handle)\n self._handle=None\n \n def __del__(self):\n if self._handle is not None :\n warnings.warn(\"unclosed %r\"%self,ResourceWarning)\n self.close()\n \n def __enter__(self):\n return self\n \n def __exit__(self,t,v,tb):\n self.close()\n \n \n \n \n \nclass Popen(subprocess.Popen):\n ''\n\n\n \n def __init__(self,args,stdin=None ,stdout=None ,stderr=None ,**kwds):\n assert not kwds.get('universal_newlines')\n assert kwds.get('bufsize',0)==0\n stdin_rfd=stdout_wfd=stderr_wfd=None\n stdin_wh=stdout_rh=stderr_rh=None\n if stdin ==PIPE:\n stdin_rh,stdin_wh=pipe(overlapped=(False ,True ),duplex=True )\n stdin_rfd=msvcrt.open_osfhandle(stdin_rh,os.O_RDONLY)\n else :\n stdin_rfd=stdin\n if stdout ==PIPE:\n stdout_rh,stdout_wh=pipe(overlapped=(True ,False ))\n stdout_wfd=msvcrt.open_osfhandle(stdout_wh,0)\n else :\n stdout_wfd=stdout\n if stderr ==PIPE:\n stderr_rh,stderr_wh=pipe(overlapped=(True ,False ))\n stderr_wfd=msvcrt.open_osfhandle(stderr_wh,0)\n elif stderr ==STDOUT:\n stderr_wfd=stdout_wfd\n else :\n stderr_wfd=stderr\n try :\n super().__init__(args,stdin=stdin_rfd,stdout=stdout_wfd,\n stderr=stderr_wfd,**kwds)\n except :\n for h in (stdin_wh,stdout_rh,stderr_rh):\n if h is not None :\n _winapi.CloseHandle(h)\n raise\n else :\n if stdin_wh is not None :\n self.stdin=PipeHandle(stdin_wh)\n if stdout_rh is not None :\n self.stdout=PipeHandle(stdout_rh)\n if stderr_rh is not None :\n self.stderr=PipeHandle(stderr_rh)\n finally :\n if stdin ==PIPE:\n os.close(stdin_rfd)\n if stdout ==PIPE:\n os.close(stdout_wfd)\n if stderr ==PIPE:\n os.close(stderr_wfd)\n"], "json": [".js", "var $module = (function($B){\n\nreturn {\n loads : function(json_obj){\n return $B.jsobject2pyobject(JSON.parse(json_obj))\n },\n load : function(file_obj){\n return $module.loads(file_obj.$content);\n },\n dumps : function(obj){return JSON.stringify($B.pyobject2jsobject(obj))},\n}\n\n})(__BRYTHON__)\n"], "encodings.cp437": [".py", "''\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def 
encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_map)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False ):\n return codecs.charmap_encode(input,self.errors,encoding_map)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False ):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='cp437',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n \ndecoding_map=codecs.make_identity_dict(range(256))\ndecoding_map.update({\n0x0080:0x00c7,\n0x0081:0x00fc,\n0x0082:0x00e9,\n0x0083:0x00e2,\n0x0084:0x00e4,\n0x0085:0x00e0,\n0x0086:0x00e5,\n0x0087:0x00e7,\n0x0088:0x00ea,\n0x0089:0x00eb,\n0x008a:0x00e8,\n0x008b:0x00ef,\n0x008c:0x00ee,\n0x008d:0x00ec,\n0x008e:0x00c4,\n0x008f:0x00c5,\n0x0090:0x00c9,\n0x0091:0x00e6,\n0x0092:0x00c6,\n0x0093:0x00f4,\n0x0094:0x00f6,\n0x0095:0x00f2,\n0x0096:0x00fb,\n0x0097:0x00f9,\n0x0098:0x00ff,\n0x0099:0x00d6,\n0x009a:0x00dc,\n0x009b:0x00a2,\n0x009c:0x00a3,\n0x009d:0x00a5,\n0x009e:0x20a7,\n0x009f:0x0192,\n0x00a0:0x00e1,\n0x00a1:0x00ed,\n0x00a2:0x00f3,\n0x00a3:0x00fa,\n0x00a4:0x00f1,\n0x00a5:0x00d1,\n0x00a6:0x00aa,\n0x00a7:0x00ba,\n0x00a8:0x00bf,\n0x00a9:0x2310,\n0x00aa:0x00ac,\n0x00ab:0x00bd,\n0x00ac:0x00bc,\n0x00ad:0x00a1,\n0x00ae:0x00ab,\n0x00af:0x00bb,\n0x00b0:0x2591,\n0x00b1:0x2592,\n0x00b2:0x2593,\n0x00b3:0x2502,\n0x00b4:0x2524,\n0x00b5:0x2561,\n0x00b6:0x2562,\n0x00b7:0x2556,\n0x00b8:0x2555,\n0x00b9:0x2563,\n0x00ba:0x2551,\n0x00bb:0x2557,\n0x00bc:0x255d,\n0x00bd:0x255c,\n0x00be:0x255b,\n0x00bf:0x2510,\n0x00c0:0x2514,\n0x00c1:0x2534,\n0x00c2:0x252c,\n0x00c3:0x251c,\n0x00c4:0x2500,\n0x00c5:0x253c,\n0x00c6:0x255e,\n0x00c7:0x255f,\n0x00c8:0x255a,\n0x00c9:0x2554,\n0x00ca:0x2569,\n0x00cb:0x2566,\n0x00cc:0x2560,\n0x00cd:0x2550,\n0x00ce:0x256c,\n0x00cf:0x2567,\n0x00d0:0x2568,\n0x00d1:0x2564,\n0x00d2:0x2565,\n0x00d3:0x2559,\n0x00d4:0x2558,\n0x00d5:0x2552,\n0x00d6:0x2553,\n0x00d7:0x256b,\n0x00d8:0x256a,\n0x00d9:0x2518,\n0x00da:0x250c,\n0x00db:0x2588,\n0x00dc:0x2584,\n0x00dd:0x258c,\n0x00de:0x2590,\n0x00df:0x2580,\n0x00e0:0x03b1,\n0x00e1:0x00df,\n0x00e2:0x0393,\n0x00e3:0x03c0,\n0x00e4:0x03a3,\n0x00e5:0x03c3,\n0x00e6:0x00b5,\n0x00e7:0x03c4,\n0x00e8:0x03a6,\n0x00e9:0x0398,\n0x00ea:0x03a9,\n0x00eb:0x03b4,\n0x00ec:0x221e,\n0x00ed:0x03c6,\n0x00ee:0x03b5,\n0x00ef:0x2229,\n0x00f0:0x2261,\n0x00f1:0x00b1,\n0x00f2:0x2265,\n0x00f3:0x2264,\n0x00f4:0x2320,\n0x00f5:0x2321,\n0x00f6:0x00f7,\n0x00f7:0x2248,\n0x00f8:0x00b0,\n0x00f9:0x2219,\n0x00fa:0x00b7,\n0x00fb:0x221a,\n0x00fc:0x207f,\n0x00fd:0x00b2,\n0x00fe:0x25a0,\n0x00ff:0x00a0,\n})\n\n\n\ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' 
'\n'!'\n'\"'\n'#'\n'$'\n'%'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\xc7'\n'\\xfc'\n'\\xe9'\n'\\xe2'\n'\\xe4'\n'\\xe0'\n'\\xe5'\n'\\xe7'\n'\\xea'\n'\\xeb'\n'\\xe8'\n'\\xef'\n'\\xee'\n'\\xec'\n'\\xc4'\n'\\xc5'\n'\\xc9'\n'\\xe6'\n'\\xc6'\n'\\xf4'\n'\\xf6'\n'\\xf2'\n'\\xfb'\n'\\xf9'\n'\\xff'\n'\\xd6'\n'\\xdc'\n'\\xa2'\n'\\xa3'\n'\\xa5'\n'\\u20a7'\n'\\u0192'\n'\\xe1'\n'\\xed'\n'\\xf3'\n'\\xfa'\n'\\xf1'\n'\\xd1'\n'\\xaa'\n'\\xba'\n'\\xbf'\n'\\u2310'\n'\\xac'\n'\\xbd'\n'\\xbc'\n'\\xa1'\n'\\xab'\n'\\xbb'\n'\\u2591'\n'\\u2592'\n'\\u2593'\n'\\u2502'\n'\\u2524'\n'\\u2561'\n'\\u2562'\n'\\u2556'\n'\\u2555'\n'\\u2563'\n'\\u2551'\n'\\u2557'\n'\\u255d'\n'\\u255c'\n'\\u255b'\n'\\u2510'\n'\\u2514'\n'\\u2534'\n'\\u252c'\n'\\u251c'\n'\\u2500'\n'\\u253c'\n'\\u255e'\n'\\u255f'\n'\\u255a'\n'\\u2554'\n'\\u2569'\n'\\u2566'\n'\\u2560'\n'\\u2550'\n'\\u256c'\n'\\u2567'\n'\\u2568'\n'\\u2564'\n'\\u2565'\n'\\u2559'\n'\\u2558'\n'\\u2552'\n'\\u2553'\n'\\u256b'\n'\\u256a'\n'\\u2518'\n'\\u250c'\n'\\u2588'\n'\\u2584'\n'\\u258c'\n'\\u2590'\n'\\u2580'\n'\\u03b1'\n'\\xdf'\n'\\u0393'\n'\\u03c0'\n'\\u03a3'\n'\\u03c3'\n'\\xb5'\n'\\u03c4'\n'\\u03a6'\n'\\u0398'\n'\\u03a9'\n'\\u03b4'\n'\\u221e'\n'\\u03c6'\n'\\u03b5'\n'\\u2229'\n'\\u2261'\n'\\xb1'\n'\\u2265'\n'\\u2264'\n'\\u2320'\n'\\u2321'\n'\\xf7'\n'\\u2248'\n'\\xb0'\n'\\u2219'\n'\\xb7'\n'\\u221a'\n'\\u207f'\n'\\xb2'\n'\\u25a0'\n'\\xa0'\n)\n\n\n\nencoding_map={\n0x0000:0x0000,\n0x0001:0x0001,\n0x0002:0x0002,\n0x0003:0x0003,\n0x0004:0x0004,\n0x0005:0x0005,\n0x0006:0x0006,\n0x0007:0x0007,\n0x0008:0x0008,\n0x0009:0x0009,\n0x000a:0x000a,\n0x000b:0x000b,\n0x000c:0x000c,\n0x000d:0x000d,\n0x000e:0x000e,\n0x000f:0x000f,\n0x0010:0x0010,\n0x0011:0x0011,\n0x0012:0x0012,\n0x0013:0x0013,\n0x0014:0x0014,\n0x0015:0x0015,\n0x0016:0x0016,\n0x0017:0x0017,\n0x0018:0x0018,\n0x0019:0x0019,\n0x001a:0x001a,\n0x001b:0x001b,\n0x001c:0x001c,\n0x001d:0x001d,\n0x001e:0x001e,\n0x001f:0x001f,\n0x0020:0x0020,\n0x0021:0x0021,\n0x0022:0x0022,\n0x0023:0x0023,\n0x0024:0x0024,\n0x0025:0x0025,\n0x0026:0x0026,\n0x0027:0x0027,\n0x0028:0x0028,\n0x0029:0x0029,\n0x002a:0x002a,\n0x002b:0x002b,\n0x002c:0x002c,\n0x002d:0x002d,\n0x002e:0x002e,\n0x002f:0x002f,\n0x0030:0x0030,\n0x0031:0x0031,\n0x0032:0x0032,\n0x0033:0x0033,\n0x0034:0x0034,\n0x0035:0x0035,\n0x0036:0x0036,\n0x0037:0x0037,\n0x0038:0x0038,\n0x0039:0x0039,\n0x003a:0x003a,\n0x003b:0x003b,\n0x003c:0x003c,\n0x003d:0x003d,\n0x003e:0x003e,\n0x003f:0x003f,\n0x0040:0x0040,\n0x0041:0x0041,\n0x0042:0x0042,\n0x0043:0x0043,\n0x0044:0x0044,\n0x0045:0x0045,\n0x0046:0x0046,\n0x0047:0x0047,\n0x0048:0x0048,\n0x0049:0x0049,\n0x004a:0x004a,\n0x004b:0x004b,\n0x004c:0x004c,\n0x004d:0x004d,\n0x004e:0x004e,\n0x004f:0x004f,\n0x0050:0x0050,\n0x0051:0x0051,\n0x0052:0x0052,\n0x0053:0x0053,\n0x0054:0x0054,\n0x0055:0x0055,\n0x0056:0x0056,\n0x0057:0x0057,\n0x0058:0x0058,\n0x0059:0x0059,\n0x005a:0x005a,\n0x005b:0x005b,\n0x005c:0x005c,\n0x005d:0x005d,\n0x005e:0x005e,\n0x005f:0x005f,\n0x0060:0x0060,\n0x0061:0x0061,\n0x0062:0x0062,\n0x0063:0x0063,\n0x0064:0x0064,\n0x0065:0x0065,\n0x0066:0x0066,\n0x0067:0x0067,\n0x0068:0x0068,\n0x0069:0x0069,\n0x006a:0x006a,\n0x006b:0x006b,\n0x006c:0x006
c,\n0x006d:0x006d,\n0x006e:0x006e,\n0x006f:0x006f,\n0x0070:0x0070,\n0x0071:0x0071,\n0x0072:0x0072,\n0x0073:0x0073,\n0x0074:0x0074,\n0x0075:0x0075,\n0x0076:0x0076,\n0x0077:0x0077,\n0x0078:0x0078,\n0x0079:0x0079,\n0x007a:0x007a,\n0x007b:0x007b,\n0x007c:0x007c,\n0x007d:0x007d,\n0x007e:0x007e,\n0x007f:0x007f,\n0x00a0:0x00ff,\n0x00a1:0x00ad,\n0x00a2:0x009b,\n0x00a3:0x009c,\n0x00a5:0x009d,\n0x00aa:0x00a6,\n0x00ab:0x00ae,\n0x00ac:0x00aa,\n0x00b0:0x00f8,\n0x00b1:0x00f1,\n0x00b2:0x00fd,\n0x00b5:0x00e6,\n0x00b7:0x00fa,\n0x00ba:0x00a7,\n0x00bb:0x00af,\n0x00bc:0x00ac,\n0x00bd:0x00ab,\n0x00bf:0x00a8,\n0x00c4:0x008e,\n0x00c5:0x008f,\n0x00c6:0x0092,\n0x00c7:0x0080,\n0x00c9:0x0090,\n0x00d1:0x00a5,\n0x00d6:0x0099,\n0x00dc:0x009a,\n0x00df:0x00e1,\n0x00e0:0x0085,\n0x00e1:0x00a0,\n0x00e2:0x0083,\n0x00e4:0x0084,\n0x00e5:0x0086,\n0x00e6:0x0091,\n0x00e7:0x0087,\n0x00e8:0x008a,\n0x00e9:0x0082,\n0x00ea:0x0088,\n0x00eb:0x0089,\n0x00ec:0x008d,\n0x00ed:0x00a1,\n0x00ee:0x008c,\n0x00ef:0x008b,\n0x00f1:0x00a4,\n0x00f2:0x0095,\n0x00f3:0x00a2,\n0x00f4:0x0093,\n0x00f6:0x0094,\n0x00f7:0x00f6,\n0x00f9:0x0097,\n0x00fa:0x00a3,\n0x00fb:0x0096,\n0x00fc:0x0081,\n0x00ff:0x0098,\n0x0192:0x009f,\n0x0393:0x00e2,\n0x0398:0x00e9,\n0x03a3:0x00e4,\n0x03a6:0x00e8,\n0x03a9:0x00ea,\n0x03b1:0x00e0,\n0x03b4:0x00eb,\n0x03b5:0x00ee,\n0x03c0:0x00e3,\n0x03c3:0x00e5,\n0x03c4:0x00e7,\n0x03c6:0x00ed,\n0x207f:0x00fc,\n0x20a7:0x009e,\n0x2219:0x00f9,\n0x221a:0x00fb,\n0x221e:0x00ec,\n0x2229:0x00ef,\n0x2248:0x00f7,\n0x2261:0x00f0,\n0x2264:0x00f3,\n0x2265:0x00f2,\n0x2310:0x00a9,\n0x2320:0x00f4,\n0x2321:0x00f5,\n0x2500:0x00c4,\n0x2502:0x00b3,\n0x250c:0x00da,\n0x2510:0x00bf,\n0x2514:0x00c0,\n0x2518:0x00d9,\n0x251c:0x00c3,\n0x2524:0x00b4,\n0x252c:0x00c2,\n0x2534:0x00c1,\n0x253c:0x00c5,\n0x2550:0x00cd,\n0x2551:0x00ba,\n0x2552:0x00d5,\n0x2553:0x00d6,\n0x2554:0x00c9,\n0x2555:0x00b8,\n0x2556:0x00b7,\n0x2557:0x00bb,\n0x2558:0x00d4,\n0x2559:0x00d3,\n0x255a:0x00c8,\n0x255b:0x00be,\n0x255c:0x00bd,\n0x255d:0x00bc,\n0x255e:0x00c6,\n0x255f:0x00c7,\n0x2560:0x00cc,\n0x2561:0x00b5,\n0x2562:0x00b6,\n0x2563:0x00b9,\n0x2564:0x00d1,\n0x2565:0x00d2,\n0x2566:0x00cb,\n0x2567:0x00cf,\n0x2568:0x00d0,\n0x2569:0x00ca,\n0x256a:0x00d8,\n0x256b:0x00d7,\n0x256c:0x00ce,\n0x2580:0x00df,\n0x2584:0x00dc,\n0x2588:0x00db,\n0x258c:0x00dd,\n0x2590:0x00de,\n0x2591:0x00b0,\n0x2592:0x00b1,\n0x2593:0x00b2,\n0x25a0:0x00fe,\n}\n"], "concurrent.futures._base": [".py", "\n\n\n__author__='Brian Quinlan (brian@sweetapp.com)'\n\nimport collections\nimport logging\nimport threading\nimport time\n\nFIRST_COMPLETED='FIRST_COMPLETED'\nFIRST_EXCEPTION='FIRST_EXCEPTION'\nALL_COMPLETED='ALL_COMPLETED'\n_AS_COMPLETED='_AS_COMPLETED'\n\n\nPENDING='PENDING'\nRUNNING='RUNNING'\n\nCANCELLED='CANCELLED'\n\nCANCELLED_AND_NOTIFIED='CANCELLED_AND_NOTIFIED'\nFINISHED='FINISHED'\n\n_FUTURE_STATES=[\nPENDING,\nRUNNING,\nCANCELLED,\nCANCELLED_AND_NOTIFIED,\nFINISHED\n]\n\n_STATE_TO_DESCRIPTION_MAP={\nPENDING:\"pending\",\nRUNNING:\"running\",\nCANCELLED:\"cancelled\",\nCANCELLED_AND_NOTIFIED:\"cancelled\",\nFINISHED:\"finished\"\n}\n\n\nLOGGER=logging.getLogger(\"concurrent.futures\")\n\nclass Error(Exception):\n ''\n pass\n \nclass CancelledError(Error):\n ''\n pass\n \nclass TimeoutError(Error):\n ''\n pass\n \nclass _Waiter(object):\n ''\n def __init__(self):\n self.event=threading.Event()\n self.finished_futures=[]\n \n def add_result(self,future):\n self.finished_futures.append(future)\n \n def add_exception(self,future):\n self.finished_futures.append(future)\n \n def add_cancelled(self,future):\n 
self.finished_futures.append(future)\n \nclass _AsCompletedWaiter(_Waiter):\n ''\n \n def __init__(self):\n super(_AsCompletedWaiter,self).__init__()\n self.lock=threading.Lock()\n \n def add_result(self,future):\n with self.lock:\n super(_AsCompletedWaiter,self).add_result(future)\n self.event.set()\n \n def add_exception(self,future):\n with self.lock:\n super(_AsCompletedWaiter,self).add_exception(future)\n self.event.set()\n \n def add_cancelled(self,future):\n with self.lock:\n super(_AsCompletedWaiter,self).add_cancelled(future)\n self.event.set()\n \nclass _FirstCompletedWaiter(_Waiter):\n ''\n \n def add_result(self,future):\n super().add_result(future)\n self.event.set()\n \n def add_exception(self,future):\n super().add_exception(future)\n self.event.set()\n \n def add_cancelled(self,future):\n super().add_cancelled(future)\n self.event.set()\n \nclass _AllCompletedWaiter(_Waiter):\n ''\n \n def __init__(self,num_pending_calls,stop_on_exception):\n self.num_pending_calls=num_pending_calls\n self.stop_on_exception=stop_on_exception\n self.lock=threading.Lock()\n super().__init__()\n \n def _decrement_pending_calls(self):\n with self.lock:\n self.num_pending_calls -=1\n if not self.num_pending_calls:\n self.event.set()\n \n def add_result(self,future):\n super().add_result(future)\n self._decrement_pending_calls()\n \n def add_exception(self,future):\n super().add_exception(future)\n if self.stop_on_exception:\n self.event.set()\n else :\n self._decrement_pending_calls()\n \n def add_cancelled(self,future):\n super().add_cancelled(future)\n self._decrement_pending_calls()\n \nclass _AcquireFutures(object):\n ''\n \n def __init__(self,futures):\n self.futures=sorted(futures,key=id)\n \n def __enter__(self):\n for future in self.futures:\n future._condition.acquire()\n \n def __exit__(self,*args):\n for future in self.futures:\n future._condition.release()\n \ndef _create_and_install_waiters(fs,return_when):\n if return_when ==_AS_COMPLETED:\n waiter=_AsCompletedWaiter()\n elif return_when ==FIRST_COMPLETED:\n waiter=_FirstCompletedWaiter()\n else :\n pending_count=sum(\n f._state not in [CANCELLED_AND_NOTIFIED,FINISHED]for f in fs)\n \n if return_when ==FIRST_EXCEPTION:\n waiter=_AllCompletedWaiter(pending_count,stop_on_exception=True )\n elif return_when ==ALL_COMPLETED:\n waiter=_AllCompletedWaiter(pending_count,stop_on_exception=False )\n else :\n raise ValueError(\"Invalid return condition: %r\"%return_when)\n \n for f in fs:\n f._waiters.append(waiter)\n \n return waiter\n \ndef as_completed(fs,timeout=None ):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if timeout is not None :\n end_time=timeout+time.time()\n \n fs=set(fs)\n with _AcquireFutures(fs):\n finished=set(\n f for f in fs\n if f._state in [CANCELLED_AND_NOTIFIED,FINISHED])\n pending=fs -finished\n waiter=_create_and_install_waiters(fs,_AS_COMPLETED)\n \n try :\n yield from finished\n \n while pending:\n if timeout is None :\n wait_timeout=None\n else :\n wait_timeout=end_time -time.time()\n if wait_timeout <0:\n raise TimeoutError(\n '%d (of %d) futures unfinished'%(\n len(pending),len(fs)))\n \n waiter.event.wait(wait_timeout)\n \n with waiter.lock:\n finished=waiter.finished_futures\n waiter.finished_futures=[]\n waiter.event.clear()\n \n for future in finished:\n yield future\n pending.remove(future)\n \n finally :\n for f in fs:\n with f._condition:\n f._waiters.remove(waiter)\n \nDoneAndNotDoneFutures=collections.namedtuple(\n'DoneAndNotDoneFutures','done not_done')\ndef wait(fs,timeout=None 
,return_when=ALL_COMPLETED):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n with _AcquireFutures(fs):\n done=set(f for f in fs\n if f._state in [CANCELLED_AND_NOTIFIED,FINISHED])\n not_done=set(fs)-done\n \n if (return_when ==FIRST_COMPLETED)and done:\n return DoneAndNotDoneFutures(done,not_done)\n elif (return_when ==FIRST_EXCEPTION)and done:\n if any(f for f in done\n if not f.cancelled()and f.exception()is not None ):\n return DoneAndNotDoneFutures(done,not_done)\n \n if len(done)==len(fs):\n return DoneAndNotDoneFutures(done,not_done)\n \n waiter=_create_and_install_waiters(fs,return_when)\n \n waiter.event.wait(timeout)\n for f in fs:\n with f._condition:\n f._waiters.remove(waiter)\n \n done.update(waiter.finished_futures)\n return DoneAndNotDoneFutures(done,set(fs)-done)\n \nclass Future(object):\n ''\n \n def __init__(self):\n ''\n self._condition=threading.Condition()\n self._state=PENDING\n self._result=None\n self._exception=None\n self._waiters=[]\n self._done_callbacks=[]\n \n def _invoke_callbacks(self):\n for callback in self._done_callbacks:\n try :\n callback(self)\n except Exception:\n LOGGER.exception('exception calling callback for %r',self)\n \n def __repr__(self):\n with self._condition:\n if self._state ==FINISHED:\n if self._exception:\n return''%(\n hex(id(self)),\n _STATE_TO_DESCRIPTION_MAP[self._state],\n self._exception.__class__.__name__)\n else :\n return''%(\n hex(id(self)),\n _STATE_TO_DESCRIPTION_MAP[self._state],\n self._result.__class__.__name__)\n return''%(\n hex(id(self)),\n _STATE_TO_DESCRIPTION_MAP[self._state])\n \n def cancel(self):\n ''\n\n\n\n \n with self._condition:\n if self._state in [RUNNING,FINISHED]:\n return False\n \n if self._state in [CANCELLED,CANCELLED_AND_NOTIFIED]:\n return True\n \n self._state=CANCELLED\n self._condition.notify_all()\n \n self._invoke_callbacks()\n return True\n \n def cancelled(self):\n ''\n with self._condition:\n return self._state in [CANCELLED,CANCELLED_AND_NOTIFIED]\n \n def running(self):\n ''\n with self._condition:\n return self._state ==RUNNING\n \n def done(self):\n ''\n with self._condition:\n return self._state in [CANCELLED,CANCELLED_AND_NOTIFIED,FINISHED]\n \n def __get_result(self):\n if self._exception:\n raise self._exception\n else :\n return self._result\n \n def add_done_callback(self,fn):\n ''\n\n\n\n\n\n\n\n\n \n with self._condition:\n if self._state not in [CANCELLED,CANCELLED_AND_NOTIFIED,FINISHED]:\n self._done_callbacks.append(fn)\n return\n fn(self)\n \n def result(self,timeout=None ):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n with self._condition:\n if self._state in [CANCELLED,CANCELLED_AND_NOTIFIED]:\n raise CancelledError()\n elif self._state ==FINISHED:\n return self.__get_result()\n \n self._condition.wait(timeout)\n \n if self._state in [CANCELLED,CANCELLED_AND_NOTIFIED]:\n raise CancelledError()\n elif self._state ==FINISHED:\n return self.__get_result()\n else :\n raise TimeoutError()\n \n def exception(self,timeout=None ):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n with self._condition:\n if self._state in [CANCELLED,CANCELLED_AND_NOTIFIED]:\n raise CancelledError()\n elif self._state ==FINISHED:\n return self._exception\n \n self._condition.wait(timeout)\n \n if self._state in [CANCELLED,CANCELLED_AND_NOTIFIED]:\n raise CancelledError()\n elif self._state ==FINISHED:\n return self._exception\n else :\n raise TimeoutError()\n \n \n def set_running_or_notify_cancel(self):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n with self._condition:\n if self._state ==CANCELLED:\n 
self._state=CANCELLED_AND_NOTIFIED\n for waiter in self._waiters:\n waiter.add_cancelled(self)\n \n \n return False\n elif self._state ==PENDING:\n self._state=RUNNING\n return True\n else :\n LOGGER.critical('Future %s in unexpected state: %s',\n id(self),\n self._state)\n raise RuntimeError('Future in unexpected state')\n \n def set_result(self,result):\n ''\n\n\n \n with self._condition:\n self._result=result\n self._state=FINISHED\n for waiter in self._waiters:\n waiter.add_result(self)\n self._condition.notify_all()\n self._invoke_callbacks()\n \n def set_exception(self,exception):\n ''\n\n\n \n with self._condition:\n self._exception=exception\n self._state=FINISHED\n for waiter in self._waiters:\n waiter.add_exception(self)\n self._condition.notify_all()\n self._invoke_callbacks()\n \nclass Executor(object):\n ''\n \n def submit(self,fn,*args,**kwargs):\n ''\n\n\n\n\n\n\n \n raise NotImplementedError()\n \n def map(self,fn,*iterables,timeout=None ):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if timeout is not None :\n end_time=timeout+time.time()\n \n fs=[self.submit(fn,*args)for args in zip(*iterables)]\n \n \n \n def result_iterator():\n try :\n for future in fs:\n if timeout is None :\n yield future.result()\n else :\n yield future.result(end_time -time.time())\n finally :\n for future in fs:\n future.cancel()\n return result_iterator()\n \n def shutdown(self,wait=True ):\n ''\n\n\n\n\n\n\n\n\n \n pass\n \n def __enter__(self):\n return self\n \n def __exit__(self,exc_type,exc_val,exc_tb):\n self.shutdown(wait=True )\n return False\n"], "encodings.cp869": [".py", "''\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_map)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False ):\n return codecs.charmap_encode(input,self.errors,encoding_map)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False ):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='cp869',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n \ndecoding_map=codecs.make_identity_dict(range(256))\ndecoding_map.update({\n0x0080:None ,\n0x0081:None ,\n0x0082:None ,\n0x0083:None ,\n0x0084:None ,\n0x0085:None ,\n0x0086:0x0386,\n0x0087:None ,\n0x0088:0x00b7,\n0x0089:0x00ac,\n0x008a:0x00a6,\n0x008b:0x2018,\n0x008c:0x2019,\n0x008d:0x0388,\n0x008e:0x2015,\n0x008f:0x0389,\n0x0090:0x038a,\n0x0091:0x03aa,\n0x0092:0x038c,\n0x0093:None ,\n0x0094:None 
,\n0x0095:0x038e,\n0x0096:0x03ab,\n0x0097:0x00a9,\n0x0098:0x038f,\n0x0099:0x00b2,\n0x009a:0x00b3,\n0x009b:0x03ac,\n0x009c:0x00a3,\n0x009d:0x03ad,\n0x009e:0x03ae,\n0x009f:0x03af,\n0x00a0:0x03ca,\n0x00a1:0x0390,\n0x00a2:0x03cc,\n0x00a3:0x03cd,\n0x00a4:0x0391,\n0x00a5:0x0392,\n0x00a6:0x0393,\n0x00a7:0x0394,\n0x00a8:0x0395,\n0x00a9:0x0396,\n0x00aa:0x0397,\n0x00ab:0x00bd,\n0x00ac:0x0398,\n0x00ad:0x0399,\n0x00ae:0x00ab,\n0x00af:0x00bb,\n0x00b0:0x2591,\n0x00b1:0x2592,\n0x00b2:0x2593,\n0x00b3:0x2502,\n0x00b4:0x2524,\n0x00b5:0x039a,\n0x00b6:0x039b,\n0x00b7:0x039c,\n0x00b8:0x039d,\n0x00b9:0x2563,\n0x00ba:0x2551,\n0x00bb:0x2557,\n0x00bc:0x255d,\n0x00bd:0x039e,\n0x00be:0x039f,\n0x00bf:0x2510,\n0x00c0:0x2514,\n0x00c1:0x2534,\n0x00c2:0x252c,\n0x00c3:0x251c,\n0x00c4:0x2500,\n0x00c5:0x253c,\n0x00c6:0x03a0,\n0x00c7:0x03a1,\n0x00c8:0x255a,\n0x00c9:0x2554,\n0x00ca:0x2569,\n0x00cb:0x2566,\n0x00cc:0x2560,\n0x00cd:0x2550,\n0x00ce:0x256c,\n0x00cf:0x03a3,\n0x00d0:0x03a4,\n0x00d1:0x03a5,\n0x00d2:0x03a6,\n0x00d3:0x03a7,\n0x00d4:0x03a8,\n0x00d5:0x03a9,\n0x00d6:0x03b1,\n0x00d7:0x03b2,\n0x00d8:0x03b3,\n0x00d9:0x2518,\n0x00da:0x250c,\n0x00db:0x2588,\n0x00dc:0x2584,\n0x00dd:0x03b4,\n0x00de:0x03b5,\n0x00df:0x2580,\n0x00e0:0x03b6,\n0x00e1:0x03b7,\n0x00e2:0x03b8,\n0x00e3:0x03b9,\n0x00e4:0x03ba,\n0x00e5:0x03bb,\n0x00e6:0x03bc,\n0x00e7:0x03bd,\n0x00e8:0x03be,\n0x00e9:0x03bf,\n0x00ea:0x03c0,\n0x00eb:0x03c1,\n0x00ec:0x03c3,\n0x00ed:0x03c2,\n0x00ee:0x03c4,\n0x00ef:0x0384,\n0x00f0:0x00ad,\n0x00f1:0x00b1,\n0x00f2:0x03c5,\n0x00f3:0x03c6,\n0x00f4:0x03c7,\n0x00f5:0x00a7,\n0x00f6:0x03c8,\n0x00f7:0x0385,\n0x00f8:0x00b0,\n0x00f9:0x00a8,\n0x00fa:0x03c9,\n0x00fb:0x03cb,\n0x00fc:0x03b0,\n0x00fd:0x03ce,\n0x00fe:0x25a0,\n0x00ff:0x00a0,\n})\n\n\n\ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' 
'\n'!'\n'\"'\n'#'\n'$'\n'%'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\u0386'\n'\\ufffe'\n'\\xb7'\n'\\xac'\n'\\xa6'\n'\\u2018'\n'\\u2019'\n'\\u0388'\n'\\u2015'\n'\\u0389'\n'\\u038a'\n'\\u03aa'\n'\\u038c'\n'\\ufffe'\n'\\ufffe'\n'\\u038e'\n'\\u03ab'\n'\\xa9'\n'\\u038f'\n'\\xb2'\n'\\xb3'\n'\\u03ac'\n'\\xa3'\n'\\u03ad'\n'\\u03ae'\n'\\u03af'\n'\\u03ca'\n'\\u0390'\n'\\u03cc'\n'\\u03cd'\n'\\u0391'\n'\\u0392'\n'\\u0393'\n'\\u0394'\n'\\u0395'\n'\\u0396'\n'\\u0397'\n'\\xbd'\n'\\u0398'\n'\\u0399'\n'\\xab'\n'\\xbb'\n'\\u2591'\n'\\u2592'\n'\\u2593'\n'\\u2502'\n'\\u2524'\n'\\u039a'\n'\\u039b'\n'\\u039c'\n'\\u039d'\n'\\u2563'\n'\\u2551'\n'\\u2557'\n'\\u255d'\n'\\u039e'\n'\\u039f'\n'\\u2510'\n'\\u2514'\n'\\u2534'\n'\\u252c'\n'\\u251c'\n'\\u2500'\n'\\u253c'\n'\\u03a0'\n'\\u03a1'\n'\\u255a'\n'\\u2554'\n'\\u2569'\n'\\u2566'\n'\\u2560'\n'\\u2550'\n'\\u256c'\n'\\u03a3'\n'\\u03a4'\n'\\u03a5'\n'\\u03a6'\n'\\u03a7'\n'\\u03a8'\n'\\u03a9'\n'\\u03b1'\n'\\u03b2'\n'\\u03b3'\n'\\u2518'\n'\\u250c'\n'\\u2588'\n'\\u2584'\n'\\u03b4'\n'\\u03b5'\n'\\u2580'\n'\\u03b6'\n'\\u03b7'\n'\\u03b8'\n'\\u03b9'\n'\\u03ba'\n'\\u03bb'\n'\\u03bc'\n'\\u03bd'\n'\\u03be'\n'\\u03bf'\n'\\u03c0'\n'\\u03c1'\n'\\u03c3'\n'\\u03c2'\n'\\u03c4'\n'\\u0384'\n'\\xad'\n'\\xb1'\n'\\u03c5'\n'\\u03c6'\n'\\u03c7'\n'\\xa7'\n'\\u03c8'\n'\\u0385'\n'\\xb0'\n'\\xa8'\n'\\u03c9'\n'\\u03cb'\n'\\u03b0'\n'\\u03ce'\n'\\u25a0'\n'\\xa0'\n)\n\n\n\nencoding_map={\n0x0000:0x0000,\n0x0001:0x0001,\n0x0002:0x0002,\n0x0003:0x0003,\n0x0004:0x0004,\n0x0005:0x0005,\n0x0006:0x0006,\n0x0007:0x0007,\n0x0008:0x0008,\n0x0009:0x0009,\n0x000a:0x000a,\n0x000b:0x000b,\n0x000c:0x000c,\n0x000d:0x000d,\n0x000e:0x000e,\n0x000f:0x000f,\n0x0010:0x0010,\n0x0011:0x0011,\n0x0012:0x0012,\n0x0013:0x0013,\n0x0014:0x0014,\n0x0015:0x0015,\n0x0016:0x0016,\n0x0017:0x0017,\n0x0018:0x0018,\n0x0019:0x0019,\n0x001a:0x001a,\n0x001b:0x001b,\n0x001c:0x001c,\n0x001d:0x001d,\n0x001e:0x001e,\n0x001f:0x001f,\n0x0020:0x0020,\n0x0021:0x0021,\n0x0022:0x0022,\n0x0023:0x0023,\n0x0024:0x0024,\n0x0025:0x0025,\n0x0026:0x0026,\n0x0027:0x0027,\n0x0028:0x0028,\n0x0029:0x0029,\n0x002a:0x002a,\n0x002b:0x002b,\n0x002c:0x002c,\n0x002d:0x002d,\n0x002e:0x002e,\n0x002f:0x002f,\n0x0030:0x0030,\n0x0031:0x0031,\n0x0032:0x0032,\n0x0033:0x0033,\n0x0034:0x0034,\n0x0035:0x0035,\n0x0036:0x0036,\n0x0037:0x0037,\n0x0038:0x0038,\n0x0039:0x0039,\n0x003a:0x003a,\n0x003b:0x003b,\n0x003c:0x003c,\n0x003d:0x003d,\n0x003e:0x003e,\n0x003f:0x003f,\n0x0040:0x0040,\n0x0041:0x0041,\n0x0042:0x0042,\n0x0043:0x0043,\n0x0044:0x0044,\n0x0045:0x0045,\n0x0046:0x0046,\n0x0047:0x0047,\n0x0048:0x0048,\n0x0049:0x0049,\n0x004a:0x004a,\n0x004b:0x004b,\n0x004c:0x004c,\n0x004d:0x004d,\n0x004e:0x004e,\n0x004f:0x004f,\n0x0050:0x0050,\n0x0051:0x0051,\n0x0052:0x0052,\n0x0053:0x0053,\n0x0054:0x0054,\n0x0055:0x0055,\n0x0056:0x0056,\n0x0057:0x0057,\n0x0058:0x0058,\n0x0059:0x0059,\n0x005a:0x005a,\n0x005b:0x005b,\n0x005c:0x005c,\n0x005d:0x005d,\n0x005e:0x005e,\n0x005f:0x005f,\n0x0060:0x0060,\n0x0061:0x0061,\n0x0062:0x0062,\n0x0063:0x0063,\n0x0064:0x0064,\n0x0065:0x0065,\n0x0066:0x0066,\n0x0067:0x0067,\n0x
0068:0x0068,\n0x0069:0x0069,\n0x006a:0x006a,\n0x006b:0x006b,\n0x006c:0x006c,\n0x006d:0x006d,\n0x006e:0x006e,\n0x006f:0x006f,\n0x0070:0x0070,\n0x0071:0x0071,\n0x0072:0x0072,\n0x0073:0x0073,\n0x0074:0x0074,\n0x0075:0x0075,\n0x0076:0x0076,\n0x0077:0x0077,\n0x0078:0x0078,\n0x0079:0x0079,\n0x007a:0x007a,\n0x007b:0x007b,\n0x007c:0x007c,\n0x007d:0x007d,\n0x007e:0x007e,\n0x007f:0x007f,\n0x00a0:0x00ff,\n0x00a3:0x009c,\n0x00a6:0x008a,\n0x00a7:0x00f5,\n0x00a8:0x00f9,\n0x00a9:0x0097,\n0x00ab:0x00ae,\n0x00ac:0x0089,\n0x00ad:0x00f0,\n0x00b0:0x00f8,\n0x00b1:0x00f1,\n0x00b2:0x0099,\n0x00b3:0x009a,\n0x00b7:0x0088,\n0x00bb:0x00af,\n0x00bd:0x00ab,\n0x0384:0x00ef,\n0x0385:0x00f7,\n0x0386:0x0086,\n0x0388:0x008d,\n0x0389:0x008f,\n0x038a:0x0090,\n0x038c:0x0092,\n0x038e:0x0095,\n0x038f:0x0098,\n0x0390:0x00a1,\n0x0391:0x00a4,\n0x0392:0x00a5,\n0x0393:0x00a6,\n0x0394:0x00a7,\n0x0395:0x00a8,\n0x0396:0x00a9,\n0x0397:0x00aa,\n0x0398:0x00ac,\n0x0399:0x00ad,\n0x039a:0x00b5,\n0x039b:0x00b6,\n0x039c:0x00b7,\n0x039d:0x00b8,\n0x039e:0x00bd,\n0x039f:0x00be,\n0x03a0:0x00c6,\n0x03a1:0x00c7,\n0x03a3:0x00cf,\n0x03a4:0x00d0,\n0x03a5:0x00d1,\n0x03a6:0x00d2,\n0x03a7:0x00d3,\n0x03a8:0x00d4,\n0x03a9:0x00d5,\n0x03aa:0x0091,\n0x03ab:0x0096,\n0x03ac:0x009b,\n0x03ad:0x009d,\n0x03ae:0x009e,\n0x03af:0x009f,\n0x03b0:0x00fc,\n0x03b1:0x00d6,\n0x03b2:0x00d7,\n0x03b3:0x00d8,\n0x03b4:0x00dd,\n0x03b5:0x00de,\n0x03b6:0x00e0,\n0x03b7:0x00e1,\n0x03b8:0x00e2,\n0x03b9:0x00e3,\n0x03ba:0x00e4,\n0x03bb:0x00e5,\n0x03bc:0x00e6,\n0x03bd:0x00e7,\n0x03be:0x00e8,\n0x03bf:0x00e9,\n0x03c0:0x00ea,\n0x03c1:0x00eb,\n0x03c2:0x00ed,\n0x03c3:0x00ec,\n0x03c4:0x00ee,\n0x03c5:0x00f2,\n0x03c6:0x00f3,\n0x03c7:0x00f4,\n0x03c8:0x00f6,\n0x03c9:0x00fa,\n0x03ca:0x00a0,\n0x03cb:0x00fb,\n0x03cc:0x00a2,\n0x03cd:0x00a3,\n0x03ce:0x00fd,\n0x2015:0x008e,\n0x2018:0x008b,\n0x2019:0x008c,\n0x2500:0x00c4,\n0x2502:0x00b3,\n0x250c:0x00da,\n0x2510:0x00bf,\n0x2514:0x00c0,\n0x2518:0x00d9,\n0x251c:0x00c3,\n0x2524:0x00b4,\n0x252c:0x00c2,\n0x2534:0x00c1,\n0x253c:0x00c5,\n0x2550:0x00cd,\n0x2551:0x00ba,\n0x2554:0x00c9,\n0x2557:0x00bb,\n0x255a:0x00c8,\n0x255d:0x00bc,\n0x2560:0x00cc,\n0x2563:0x00b9,\n0x2566:0x00cb,\n0x2569:0x00ca,\n0x256c:0x00ce,\n0x2580:0x00df,\n0x2584:0x00dc,\n0x2588:0x00db,\n0x2591:0x00b0,\n0x2592:0x00b1,\n0x2593:0x00b2,\n0x25a0:0x00fe,\n}\n"], "multiprocessing.connection": [".py", "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n__all__=['Client','Listener','Pipe']\n\nfrom queue import Queue\n\n\nfamilies=[None ]\n\n\nclass Listener(object):\n\n def __init__(self,address=None ,family=None ,backlog=1):\n self._backlog_queue=Queue(backlog)\n \n def accept(self):\n return Connection(*self._backlog_queue.get())\n \n def close(self):\n self._backlog_queue=None\n \n address=property(lambda self:self._backlog_queue)\n \n def __enter__(self):\n return self\n \n def __exit__(self,exc_type,exc_value,exc_tb):\n self.close()\n \n \ndef Client(address):\n _in,_out=Queue(),Queue()\n address.put((_out,_in))\n return Connection(_in,_out)\n \n \ndef Pipe(duplex=True ):\n a,b=Queue(),Queue()\n return Connection(a,b),Connection(b,a)\n \n \nclass Connection(object):\n\n def __init__(self,_in,_out):\n self._out=_out\n self._in=_in\n self.send=self.send_bytes=_out.put\n self.recv=self.recv_bytes=_in.get\n \n def poll(self,timeout=0.0):\n if self._in.qsize()>0:\n return True\n if timeout <=0.0:\n return False\n self._in.not_empty.acquire()\n self._in.not_empty.wait(timeout)\n self._in.not_empty.release()\n return self._in.qsize()>0\n \n def close(self):\n pass\n \n def 
__enter__(self):\n return self\n \n def __exit__(self,exc_type,exc_value,exc_tb):\n self.close()\n"], "multiprocessing.dummy.connection": [".py", "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n__all__=['Client','Listener','Pipe']\n\nfrom queue import Queue\n\n\nfamilies=[None ]\n\n\nclass Listener(object):\n\n def __init__(self,address=None ,family=None ,backlog=1):\n self._backlog_queue=Queue(backlog)\n \n def accept(self):\n return Connection(*self._backlog_queue.get())\n \n def close(self):\n self._backlog_queue=None\n \n address=property(lambda self:self._backlog_queue)\n \n def __enter__(self):\n return self\n \n def __exit__(self,exc_type,exc_value,exc_tb):\n self.close()\n \n \ndef Client(address):\n _in,_out=Queue(),Queue()\n address.put((_out,_in))\n return Connection(_in,_out)\n \n \ndef Pipe(duplex=True ):\n a,b=Queue(),Queue()\n return Connection(a,b),Connection(b,a)\n \n \nclass Connection(object):\n\n def __init__(self,_in,_out):\n self._out=_out\n self._in=_in\n self.send=self.send_bytes=_out.put\n self.recv=self.recv_bytes=_in.get\n \n def poll(self,timeout=0.0):\n if self._in.qsize()>0:\n return True\n if timeout <=0.0:\n return False\n self._in.not_empty.acquire()\n self._in.not_empty.wait(timeout)\n self._in.not_empty.release()\n return self._in.qsize()>0\n \n def close(self):\n pass\n \n def __enter__(self):\n return self\n \n def __exit__(self,exc_type,exc_value,exc_tb):\n self.close()\n"], "http": [".py", "", 1], "time": [".py", "from browser import window\n\n\ndate=window.Date.new\n\nnow=window.Date.now\n\n\n\n\n\n\n\n_STRUCT_TM_ITEMS=9\n\n\n\n\n\ndef _get_day_of_year(arg):\n ''\n\n\n\n\n\n\n\n\n\n \n ml=[31,28,31,30,31,30,31,31,30,31,30,31]\n if arg[0]%4 ==0:\n ml[1]+=1\n i=1\n yday=0\n while i mm >13:raise ValueError(\"month out of range\")\n \n dd=t[2]\n if dd ==0:dd=1\n if -1 >dd >32:raise ValueError(\"day of month out of range\")\n \n hh=t[3]\n if -1 >hh >24:raise ValueError(\"hour out of range\")\n \n minu=t[4]\n if -1 >minu >60:raise ValueError(\"minute out of range\")\n \n ss=t[5]\n if -1 >ss >62:raise ValueError(\"seconds out of range\")\n \n wd=t[6]%7\n if wd <-2:raise ValueError(\"day of week out of range\")\n \n dy=t[7]\n if dy ==0:dy=1\n if -1 >dy >367:raise ValueError(\"day of year out of range\")\n \n return t[0],mm,dd,hh,minu,ss,wd,dy,t[-1]\n \n \ndef _is_dst(secs=None ):\n ''\n d=date()\n if secs is not None :\n d=date(secs *1000)\n \n \n jan=date(d.getFullYear(),0,1)\n jul=date(d.getFullYear(),6,1)\n dst=int(d.getTimezoneOffset()=0 else 6\n tmp=struct_time([d.getUTCFullYear(),\n d.getUTCMonth()+1,d.getUTCDate(),\n d.getUTCHours(),d.getUTCMinutes(),d.getUTCSeconds(),\n wday,0,0])\n tmp.args[7]=_get_day_of_year(tmp.args)\n return tmp\n \ndef localtime(secs=None ):\n d=date()\n if secs is not None :\n d=date(secs *1000)\n dst=_is_dst(secs)\n wday=d.getDay()-1 if d.getDay()-1 >=0 else 6\n tmp=struct_time([d.getFullYear(),\n d.getMonth()+1,d.getDate(),\n d.getHours(),d.getMinutes(),d.getSeconds(),\n wday,0,dst])\n tmp.args[7]=_get_day_of_year(tmp.args)\n return tmp\n \ndef mktime(t):\n if isinstance(t,struct_time):\n d1=date(t.tm_year,t.tm_mon -1,t.tm_mday,\n t.tm_hour,t.tm_min,t.tm_sec,0).getTime()\n elif isinstance(t,tuple):\n d1=date(t[0],t[1]-1,t[2],t[3],t[4],t[5],0).getTime()\n else :\n raise ValueError(\"Tuple or struct_time argument required\")\n d2=date(0).getTime()\n return (d1 -d2)/1000.\n \ndef monotonic():\n return now()/1000.\n \ndef perf_counter():\n return now()/1000.\n \ndef process_time():\n return now()/1000.\n 
\ndef time():\n return float(date().getTime()/1000)\n \ndef sleep(secs):\n ''\n\n \n \n \n \n raise NotImplementedError(\"Blocking functions like time.sleep() are not \"\n \"supported in the browser. Use functions in module browser.timer \"\n \"instead.\")\n \ndef strftime(_format,t=None ):\n\n def ns(t,nb):\n \n res=str(t)\n while len(res)>>2]|=(q[b>>>2]>>>24-8*(b%4)&255)<<24-8*((d+b)%4);else if(65535>>2]=q[b>>>2];else c.push.apply(c,q);this.sigBytes+=a;return this},clamp:function(){var a=this.words,c=this.sigBytes;a[c>>>2]&=4294967295<<\n32-8*(c%4);a.length=e.ceil(c/4)},clone:function(){var a=f.clone.call(this);a.words=this.words.slice(0);return a},random:function(a){for(var c=[],b=0;b>>2]>>>24-8*(d%4)&255;b.push((f>>>4).toString(16));b.push((f&15).toString(16))}return b.join(\"\")},parse:function(a){for(var c=a.length,b=[],d=0;d>>3]|=parseInt(a.substr(d,\n2),16)<<24-4*(d%8);return new n.init(b,c/2)}},g=b.Latin1={stringify:function(a){var c=a.words;a=a.sigBytes;for(var b=[],d=0;d>>2]>>>24-8*(d%4)&255));return b.join(\"\")},parse:function(a){for(var c=a.length,b=[],d=0;d>>2]|=(a.charCodeAt(d)&255)<<24-8*(d%4);return new n.init(b,c)}},r=b.Utf8={stringify:function(a){try{return decodeURIComponent(escape(g.stringify(a)))}catch(c){throw Error(\"Malformed UTF-8 data\");}},parse:function(a){return g.parse(unescape(encodeURIComponent(a)))}},\nk=j.BufferedBlockAlgorithm=f.extend({reset:function(){this._data=new n.init;this._nDataBytes=0},_append:function(a){\"string\"==typeof a&&(a=r.parse(a));this._data.concat(a);this._nDataBytes+=a.sigBytes},_process:function(a){var c=this._data,b=c.words,d=c.sigBytes,f=this.blockSize,h=d/(4*f),h=a?e.ceil(h):e.max((h|0)-this._minBufferSize,0);a=h*f;d=e.min(4*a,d);if(a){for(var g=0;ga;a++){if(16>a)l[a]=f[n+a]|0;else{var c=l[a-3]^l[a-8]^l[a-14]^l[a-16];l[a]=c<<1|c>>>31}c=(h<<5|h>>>27)+j+l[a];c=20>a?c+((g&e|~g&k)+1518500249):40>a?c+((g^e^k)+1859775393):60>a?c+((g&e|g&k|e&k)-1894007588):c+((g^e^\nk)-899497514);j=k;k=e;e=g<<30|g>>>2;g=h;h=c}b[0]=b[0]+h|0;b[1]=b[1]+g|0;b[2]=b[2]+e|0;b[3]=b[3]+k|0;b[4]=b[4]+j|0},_doFinalize:function(){var f=this._data,e=f.words,b=8*this._nDataBytes,h=8*f.sigBytes;e[h>>>5]|=128<<24-h%32;e[(h+64>>>9<<4)+14]=Math.floor(b/4294967296);e[(h+64>>>9<<4)+15]=b;f.sigBytes=4*e.length;this._process();return this._hash},clone:function(){var e=j.clone.call(this);e._hash=this._hash.clone();return e}});e.SHA1=j._createHelper(m);e.HmacSHA1=j._createHmacHelper(m)})();\n"], "operator": [".py", "#!/usr/bin/env python3\n''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\ndef lt(a,b):\n ''\n return a =b\n__ge__=ge\n\ndef gt(a,b):\n ''\n return a >b\n__gt__=gt\n\ndef not_(a):\n ''\n return not a\n__not__=not_\n\ndef truth(a):\n ''\n \n return bool(a)\n \ndef is_(a,b):\n ''\n return a is b\n \n \n \n \n \n \n \n \n \n \n \n__abs__=abs\nabs=abs\n\n\ndef add(a,b):\n ''\n return a+b\n__add__=add\n\ndef and_(a,b):\n ''\n return a&b\n__and__=and_\n\ndef floordiv(a,b):\n ''\n return a //b\n__floordiv__=floordiv\n\ndef index(a):\n ''\n return a.__index__()\n__index__=index\n\ndef inv(a):\n ''\n return ~a\n \ninvert=__inv__=__invert__=inv\n\ndef lshift(a,b):\n ''\n return a <>b\n__rshift__=rshift\n\ndef sub(a,b):\n ''\n return a -b\n__sub__=sub\n\ndef truediv(a,b):\n ''\n return a /b\n__truediv__=truediv\n\ndef xor(a,b):\n ''\n return a ^b\n__xor__=xor\n\ndef concat(a,b):\n ''\n if not (hasattr(a,'__getitem__')and hasattr(b,'__getitem__')):\n raise TypeError('a and b must be sequences')\n return a+b\n__concat__=concat\n\ndef contains(a,b):\n ''\n return b in 
a\n__contains__=contains\n\ndef countOf(a,b):\n ''\n count=0\n for i in a:\n if i ==b:\n count +=1\n return count\n \ndef delitem(a,b):\n ''\n del a[b]\n__delitem__=delitem\n\ndef getitem(a,b):\n ''\n return a[b]\n__getitem__=getitem\n\n\ndef indexOf(a,b):\n ''\n \n for i,j in enumerate(a):\n if j ==b:\n return i\n else :\n raise ValueError('b not found in a')\n \ndef setitem(a,b,c):\n ''\n a[b]=c\n__setitem__=setitem\n\n\n\nclass attrgetter:\n ''\n\n\n\n\n\n \n def __init__(self,attr,*attrs):\n self._attrs=(attr,)\n self._attrs +=attrs\n if any(not isinstance(attr,str)for attr in self._attrs):\n raise TypeError('attribute name must be a string')\n \n @staticmethod\n def _resolve_attr(obj,attr):\n for name in attr.split('.'):\n \n obj=getattr(obj,name)\n return obj\n \n def __call__(self,obj):\n if len(self._attrs)==1:\n return self._resolve_attr(obj,self._attrs[0])\n return tuple(self._resolve_attr(obj,attr)for attr in self._attrs)\n \nclass itemgetter:\n ''\n\n\n\n \n def __init__(self,item,*items):\n self._items=(item,)\n self._items +=items\n \n def __call__(self,obj):\n if len(self._items)==1:\n return obj[self._items[0]]\n return tuple(obj[item]for item in self._items)\n \nclass methodcaller:\n ''\n\n\n\n\n \n \n def __init__(self,name,*args,**kwargs):\n self._name=name\n self._args=args\n self._kwargs=kwargs\n \n def __call__(self,obj):\n return getattr(obj,self._name)(*self._args,**self._kwargs)\n \n \ndef iadd(a,b):\n ''\n a +=b\n return a\n__iadd__=iadd\n\ndef iand(a,b):\n ''\n a &=b\n return a\n__iand__=iand\n\ndef iconcat(a,b):\n ''\n if not (hasattr(a,'__getitem__')and hasattr(b,'__getitem__')):\n raise TypeError('a and b must be sequences')\n a +=b\n return a\n__iconcat__=iconcat\n\ndef ifloordiv(a,b):\n ''\n a //=b\n return a\n__ifloordiv__=ifloordiv\n\ndef ilshift(a,b):\n ''\n a <<=b\n return a\n__ilshift__=ilshift\n\ndef imod(a,b):\n ''\n a %=b\n return a\n__imod__=imod\n\ndef imul(a,b):\n ''\n a *=b\n return a\n__imul__=imul\n\ndef ior(a,b):\n ''\n a |=b\n return a\n__ior__=ior\n\ndef ipow(a,b):\n ''\n a **=b\n return a\n__ipow__=ipow\n\ndef irshift(a,b):\n ''\n a >>=b\n return a\n__irshift__=irshift\n\ndef isub(a,b):\n ''\n a -=b\n return a\n__isub__=isub\n\ndef itruediv(a,b):\n ''\n a /=b\n return a\n__itruediv__=itruediv\n\ndef ixor(a,b):\n ''\n a ^=b\n return a\n__ixor__=ixor\n\ndef length_hint(obj,default=0):\n ''\n\n\n\n\n\n\n \n try :\n return len(obj)\n except TypeError:\n try :\n val=obj.__length_hint__()\n if val is NotImplemented:\n raise TypeError\n except (AttributeError,TypeError):\n return default\n else :\n if not val >0:\n raise ValueError('default must be > 0')\n return val\n \n \n \n \n \n \n"], "_sys": [".js", "var $module=(function($B){\n var _b_=$B.builtins\n return {\n // Called \"Getframe\" because \"_getframe\" wouldn't be imported in \n // sys.py with \"from _sys import *\"\n Getframe : function(depth){\n return $B._frame($B.frames_stack, depth)\n },\n argv:\n {'__get__':function(){return 'essai'}\n },\n modules :\n {'__get__':function(){return $B.obj_dict($B.imported)},\n '__set__':function(self, obj, value){ throw _b_.TypeError(\"Read only property 'sys.modules'\") }\n },\n path: \n {'__get__':function(){return $B.path},\n '__set__':function(self, obj, value){ $B.path = value }\n },\n meta_path: \n {'__get__':function(){return $B.meta_path},\n '__set__':function(self, obj, value){ $B.meta_path = value }\n },\n path_hooks: \n {'__get__':function(){return $B.path_hooks},\n '__set__':function(self, obj, value){ $B.path_hooks = value }\n },\n 
path_importer_cache: \n {'__get__':function(){return _b_.dict($B.JSObject($B.path_importer_cache))},\n '__set__':function(self, obj, value){ throw _b_.TypeError(\"Read only property 'sys.path_importer_cache'\") }\n },\n stderr : {\n __get__:function(){return $B.stderr},\n __set__:function(self, obj, value){$B.stderr = value},\n write:function(data){_b_.getattr($B.stderr,\"write\")(data)}\n },\n stdout : {\n __get__:function(){return $B.stdout},\n __set__:function(self, obj, value){$B.stdout = value},\n write:function(data){_b_.getattr($B.stdout,\"write\")(data)}\n },\n stdin : $B.stdin\n }\n})(__BRYTHON__)\n"], "importlib.abc": [".py", "''\nfrom .import _bootstrap\nfrom .import machinery\ntry :\n import _frozen_importlib\nexcept ImportError as exc:\n if exc.name !='_frozen_importlib':\n raise\n _frozen_importlib=None\nimport abc\nimport imp\nimport marshal\nimport sys\nimport tokenize\nimport warnings\n\n\ndef _register(abstract_cls,*classes):\n for cls in classes:\n abstract_cls.register(cls)\n if _frozen_importlib is not None :\n frozen_cls=getattr(_frozen_importlib,cls.__name__)\n abstract_cls.register(frozen_cls)\n \n \nclass Finder(metaclass=abc.ABCMeta):\n\n ''\n\n\n\n\n\n \n \n @abc.abstractmethod\n def find_module(self,fullname,path=None ):\n ''\n\n\n \n raise NotImplementedError\n \n \nclass MetaPathFinder(Finder):\n\n ''\n \n @abc.abstractmethod\n def find_module(self,fullname,path):\n ''\n\n\n \n raise NotImplementedError\n \n def invalidate_caches(self):\n ''\n\n \n return NotImplemented\n \n_register(MetaPathFinder,machinery.BuiltinImporter,machinery.FrozenImporter,\nmachinery.PathFinder,machinery.WindowsRegistryFinder)\n\n\nclass PathEntryFinder(Finder):\n\n ''\n \n @abc.abstractmethod\n def find_loader(self,fullname):\n ''\n\n\n\n\n \n raise NotImplementedError\n \n find_module=_bootstrap._find_module_shim\n \n def invalidate_caches(self):\n ''\n\n \n return NotImplemented\n \n_register(PathEntryFinder,machinery.FileFinder)\n\n\nclass Loader(metaclass=abc.ABCMeta):\n\n ''\n \n @abc.abstractmethod\n def load_module(self,fullname):\n ''\n \n raise NotImplementedError\n \n @abc.abstractmethod\n def module_repr(self,module):\n ''\n \n raise NotImplementedError\n \n \nclass ResourceLoader(Loader):\n\n ''\n\n\n\n\n \n \n @abc.abstractmethod\n def get_data(self,path):\n ''\n \n raise NotImplementedError\n \n \nclass InspectLoader(Loader):\n\n ''\n\n\n\n\n \n \n @abc.abstractmethod\n def is_package(self,fullname):\n ''\n \n raise NotImplementedError\n \n @abc.abstractmethod\n def get_code(self,fullname):\n ''\n \n raise NotImplementedError\n \n @abc.abstractmethod\n def get_source(self,fullname):\n ''\n \n raise NotImplementedError\n \n_register(InspectLoader,machinery.BuiltinImporter,machinery.FrozenImporter,\nmachinery.ExtensionFileLoader)\n\n\nclass ExecutionLoader(InspectLoader):\n\n ''\n\n\n\n\n \n \n @abc.abstractmethod\n def get_filename(self,fullname):\n ''\n \n raise NotImplementedError\n \n \nclass FileLoader(_bootstrap.FileLoader,ResourceLoader,ExecutionLoader):\n\n ''\n \n \n_register(FileLoader,machinery.SourceFileLoader,\nmachinery.SourcelessFileLoader)\n\n\nclass SourceLoader(_bootstrap.SourceLoader,ResourceLoader,ExecutionLoader):\n\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n def path_mtime(self,path):\n ''\n if self.path_stats.__func__ is SourceLoader.path_stats:\n raise NotImplementedError\n return int(self.path_stats(path)['mtime'])\n \n def path_stats(self,path):\n ''\n\n\n\n\n \n if self.path_mtime.__func__ is SourceLoader.path_mtime:\n raise NotImplementedError\n 
return {'mtime':self.path_mtime(path)}\n \n def set_data(self,path,data):\n ''\n\n\n\n\n\n\n\n \n raise NotImplementedError\n \n_register(SourceLoader,machinery.SourceFileLoader)\n\nclass PyLoader(SourceLoader):\n\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n @abc.abstractmethod\n def is_package(self,fullname):\n raise NotImplementedError\n \n @abc.abstractmethod\n def source_path(self,fullname):\n ''\n \n raise NotImplementedError\n \n def get_filename(self,fullname):\n ''\n\n\n\n\n\n\n \n warnings.warn(\"importlib.abc.PyLoader is deprecated and is \"\n \"slated for removal in Python 3.4; \"\n \"use SourceLoader instead. \"\n \"See the importlib documentation on how to be \"\n \"compatible with Python 3.1 onwards.\",\n DeprecationWarning)\n path=self.source_path(fullname)\n if path is None :\n raise ImportError(name=fullname)\n else :\n return path\n \n \nclass PyPycLoader(PyLoader):\n\n ''\n\n\n\n\n\n\n\n\n\n \n \n def get_filename(self,fullname):\n ''\n path=self.source_path(fullname)\n if path is not None :\n return path\n path=self.bytecode_path(fullname)\n if path is not None :\n return path\n raise ImportError(\"no source or bytecode path available for \"\n \"{0!r}\".format(fullname),name=fullname)\n \n def get_code(self,fullname):\n ''\n warnings.warn(\"importlib.abc.PyPycLoader is deprecated and slated for \"\n \"removal in Python 3.4; use SourceLoader instead. \"\n \"If Python 3.1 compatibility is required, see the \"\n \"latest documentation for PyLoader.\",\n DeprecationWarning)\n source_timestamp=self.source_mtime(fullname)\n \n bytecode_path=self.bytecode_path(fullname)\n if bytecode_path:\n data=self.get_data(bytecode_path)\n try :\n magic=data[:4]\n if len(magic)<4:\n raise ImportError(\n \"bad magic number in {}\".format(fullname),\n name=fullname,path=bytecode_path)\n raw_timestamp=data[4:8]\n if len(raw_timestamp)<4:\n raise EOFError(\"bad timestamp in {}\".format(fullname))\n pyc_timestamp=_bootstrap._r_long(raw_timestamp)\n raw_source_size=data[8:12]\n if len(raw_source_size)!=4:\n raise EOFError(\"bad file size in {}\".format(fullname))\n \n \n bytecode=data[12:]\n \n if imp.get_magic()!=magic:\n raise ImportError(\n \"bad magic number in {}\".format(fullname),\n name=fullname,path=bytecode_path)\n \n \n if source_timestamp:\n if pyc_timestamp 
'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\xc4'\n'\\xc5'\n'\\xc7'\n'\\xc9'\n'\\xd1'\n'\\xd6'\n'\\xdc'\n'\\xe1'\n'\\xe0'\n'\\xe2'\n'\\xe4'\n'\\xe3'\n'\\xe5'\n'\\xe7'\n'\\xe9'\n'\\xe8'\n'\\xea'\n'\\xeb'\n'\\xed'\n'\\xec'\n'\\xee'\n'\\xef'\n'\\xf1'\n'\\xf3'\n'\\xf2'\n'\\xf4'\n'\\xf6'\n'\\xf5'\n'\\xfa'\n'\\xf9'\n'\\xfb'\n'\\xfc'\n'\\u2020'\n'\\xb0'\n'\\xa2'\n'\\xa3'\n'\\xa7'\n'\\u2022'\n'\\xb6'\n'\\xdf'\n'\\xae'\n'\\xa9'\n'\\u2122'\n'\\xb4'\n'\\xa8'\n'\\u2260'\n'\\u0102'\n'\\u0218'\n'\\u221e'\n'\\xb1'\n'\\u2264'\n'\\u2265'\n'\\xa5'\n'\\xb5'\n'\\u2202'\n'\\u2211'\n'\\u220f'\n'\\u03c0'\n'\\u222b'\n'\\xaa'\n'\\xba'\n'\\u03a9'\n'\\u0103'\n'\\u0219'\n'\\xbf'\n'\\xa1'\n'\\xac'\n'\\u221a'\n'\\u0192'\n'\\u2248'\n'\\u2206'\n'\\xab'\n'\\xbb'\n'\\u2026'\n'\\xa0'\n'\\xc0'\n'\\xc3'\n'\\xd5'\n'\\u0152'\n'\\u0153'\n'\\u2013'\n'\\u2014'\n'\\u201c'\n'\\u201d'\n'\\u2018'\n'\\u2019'\n'\\xf7'\n'\\u25ca'\n'\\xff'\n'\\u0178'\n'\\u2044'\n'\\u20ac'\n'\\u2039'\n'\\u203a'\n'\\u021a'\n'\\u021b'\n'\\u2021'\n'\\xb7'\n'\\u201a'\n'\\u201e'\n'\\u2030'\n'\\xc2'\n'\\xca'\n'\\xc1'\n'\\xcb'\n'\\xc8'\n'\\xcd'\n'\\xce'\n'\\xcf'\n'\\xcc'\n'\\xd3'\n'\\xd4'\n'\\uf8ff'\n'\\xd2'\n'\\xda'\n'\\xdb'\n'\\xd9'\n'\\u0131'\n'\\u02c6'\n'\\u02dc'\n'\\xaf'\n'\\u02d8'\n'\\u02d9'\n'\\u02da'\n'\\xb8'\n'\\u02dd'\n'\\u02db'\n'\\u02c7'\n)\n\n\nencoding_table=codecs.charmap_build(decoding_table)\n"], "asyncio.base_events": [".py", "''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\nimport collections\nimport concurrent.futures\nimport heapq\nimport inspect\nimport logging\nimport os\nimport socket\nimport subprocess\nimport threading\nimport time\nimport traceback\nimport sys\nimport warnings\n\nfrom .import coroutines\nfrom .import events\nfrom .import futures\nfrom .import tasks\nfrom .coroutines import coroutine\nfrom .log import logger\n\n\n__all__=['BaseEventLoop']\n\n\n\n_MAX_WORKERS=5\n\n\n\n_MIN_SCHEDULED_TIMER_HANDLES=100\n\n\n\n_MIN_CANCELLED_TIMER_HANDLES_FRACTION=0.5\n\ndef _format_handle(handle):\n cb=handle._callback\n if inspect.ismethod(cb)and isinstance(cb.__self__,tasks.Task):\n \n return repr(cb.__self__)\n else :\n return str(handle)\n \n \ndef _format_pipe(fd):\n if fd ==subprocess.PIPE:\n return''\n elif fd ==subprocess.STDOUT:\n return''\n else :\n return repr(fd)\n \n \nclass _StopError(BaseException):\n ''\n \n \ndef _check_resolved_address(sock,address):\n\n\n\n\n\n family=sock.family\n \n if family ==socket.AF_INET:\n host,port=address\n elif family ==socket.AF_INET6:\n host,port=address[:2]\n else :\n return\n \n \n if hasattr(socket,'inet_pton'):\n \n \n try :\n socket.inet_pton(family,host)\n except OSError as exc:\n raise ValueError(\"address must be resolved (IP address), \"\n \"got host %r: %s\"\n %(host,exc))\n else :\n \n \n type_mask=0\n if hasattr(socket,'SOCK_NONBLOCK'):\n type_mask |=socket.SOCK_NONBLOCK\n if hasattr(socket,'SOCK_CLOEXEC'):\n type_mask |=socket.SOCK_CLOEXEC\n try :\n socket.getaddrinfo(host,port,\n family=family,\n type=(sock.type&~type_mask),\n proto=sock.proto,\n flags=socket.AI_NUMERICHOST)\n except socket.gaierror as err:\n raise ValueError(\"address must be resolved (IP address), \"\n \"got host %r: %s\"\n %(host,err))\n \ndef _raise_stop_error(*args):\n raise _StopError\n \n \ndef _run_until_complete_cb(fut):\n 
exc=fut._exception\n if (isinstance(exc,BaseException)\n and not isinstance(exc,Exception)):\n \n \n return\n _raise_stop_error()\n \n \nclass Server(events.AbstractServer):\n\n def __init__(self,loop,sockets):\n self._loop=loop\n self.sockets=sockets\n self._active_count=0\n self._waiters=[]\n \n def __repr__(self):\n return'<%s sockets=%r>'%(self.__class__.__name__,self.sockets)\n \n def _attach(self):\n assert self.sockets is not None\n self._active_count +=1\n \n def _detach(self):\n assert self._active_count >0\n self._active_count -=1\n if self._active_count ==0 and self.sockets is None :\n self._wakeup()\n \n def close(self):\n sockets=self.sockets\n if sockets is None :\n return\n self.sockets=None\n for sock in sockets:\n self._loop._stop_serving(sock)\n if self._active_count ==0:\n self._wakeup()\n \n def _wakeup(self):\n waiters=self._waiters\n self._waiters=None\n for waiter in waiters:\n if not waiter.done():\n waiter.set_result(waiter)\n \n @coroutine\n def wait_closed(self):\n if self.sockets is None or self._waiters is None :\n return\n waiter=futures.Future(loop=self._loop)\n self._waiters.append(waiter)\n yield from waiter\n \n \nclass BaseEventLoop(events.AbstractEventLoop):\n\n def __init__(self):\n self._timer_cancelled_count=0\n self._closed=False\n self._ready=collections.deque()\n self._scheduled=[]\n self._default_executor=None\n self._internal_fds=0\n \n \n self._thread_id=None\n self._clock_resolution=time.get_clock_info('monotonic').resolution\n self._exception_handler=None\n self._debug=(not sys.flags.ignore_environment\n and bool(os.environ.get('PYTHONASYNCIODEBUG')))\n \n \n self.slow_callback_duration=0.1\n self._current_handle=None\n \n def __repr__(self):\n return ('<%s running=%s closed=%s debug=%s>'\n %(self.__class__.__name__,self.is_running(),\n self.is_closed(),self.get_debug()))\n \n def create_task(self,coro):\n ''\n\n\n \n self._check_closed()\n task=tasks.Task(coro,loop=self)\n if task._source_traceback:\n del task._source_traceback[-1]\n return task\n \n def _make_socket_transport(self,sock,protocol,waiter=None ,*,\n extra=None ,server=None ):\n ''\n raise NotImplementedError\n \n def _make_ssl_transport(self,rawsock,protocol,sslcontext,waiter=None ,\n *,server_side=False ,server_hostname=None ,\n extra=None ,server=None ):\n ''\n raise NotImplementedError\n \n def _make_datagram_transport(self,sock,protocol,\n address=None ,waiter=None ,extra=None ):\n ''\n raise NotImplementedError\n \n def _make_read_pipe_transport(self,pipe,protocol,waiter=None ,\n extra=None ):\n ''\n raise NotImplementedError\n \n def _make_write_pipe_transport(self,pipe,protocol,waiter=None ,\n extra=None ):\n ''\n raise NotImplementedError\n \n @coroutine\n def _make_subprocess_transport(self,protocol,args,shell,\n stdin,stdout,stderr,bufsize,\n extra=None ,**kwargs):\n ''\n raise NotImplementedError\n \n def _write_to_self(self):\n ''\n\n\n\n\n \n raise NotImplementedError\n \n def _process_events(self,event_list):\n ''\n raise NotImplementedError\n \n def _check_closed(self):\n if self._closed:\n raise RuntimeError('Event loop is closed')\n \n def run_forever(self):\n ''\n self._check_closed()\n if self.is_running():\n raise RuntimeError('Event loop is running.')\n self._thread_id=threading.get_ident()\n try :\n while True :\n try :\n self._run_once()\n except _StopError:\n break\n finally :\n self._thread_id=None\n \n def run_until_complete(self,future):\n ''\n\n\n\n\n\n\n\n\n \n self._check_closed()\n \n new_task=not isinstance(future,futures.Future)\n 
future=tasks.async(future,loop=self)\n if new_task:\n \n \n future._log_destroy_pending=False\n \n future.add_done_callback(_run_until_complete_cb)\n try :\n self.run_forever()\n except :\n if new_task and future.done()and not future.cancelled():\n \n \n \n future.exception()\n raise\n future.remove_done_callback(_run_until_complete_cb)\n if not future.done():\n raise RuntimeError('Event loop stopped before Future completed.')\n \n return future.result()\n \n def stop(self):\n ''\n\n\n\n\n \n self.call_soon(_raise_stop_error)\n \n def close(self):\n ''\n\n\n\n\n\n \n if self.is_running():\n raise RuntimeError(\"Cannot close a running event loop\")\n if self._closed:\n return\n if self._debug:\n logger.debug(\"Close %r\",self)\n self._closed=True\n self._ready.clear()\n self._scheduled.clear()\n executor=self._default_executor\n if executor is not None :\n self._default_executor=None\n executor.shutdown(wait=False )\n \n def is_closed(self):\n ''\n return self._closed\n \n \n \n \n if sys.version_info >=(3,4):\n def __del__(self):\n if not self.is_closed():\n warnings.warn(\"unclosed event loop %r\"%self,ResourceWarning)\n if not self.is_running():\n self.close()\n \n def is_running(self):\n ''\n return (self._thread_id is not None )\n \n def time(self):\n ''\n\n\n\n\n \n return time.monotonic()\n \n def call_later(self,delay,callback,*args):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n timer=self.call_at(self.time()+delay,callback,*args)\n if timer._source_traceback:\n del timer._source_traceback[-1]\n return timer\n \n def call_at(self,when,callback,*args):\n ''\n\n\n \n if (coroutines.iscoroutine(callback)\n or coroutines.iscoroutinefunction(callback)):\n raise TypeError(\"coroutines cannot be used with call_at()\")\n self._check_closed()\n if self._debug:\n self._check_thread()\n timer=events.TimerHandle(when,callback,args,self)\n if timer._source_traceback:\n del timer._source_traceback[-1]\n heapq.heappush(self._scheduled,timer)\n timer._scheduled=True\n return timer\n \n def call_soon(self,callback,*args):\n ''\n\n\n\n\n\n\n\n \n if self._debug:\n self._check_thread()\n handle=self._call_soon(callback,args)\n if handle._source_traceback:\n del handle._source_traceback[-1]\n return handle\n \n def _call_soon(self,callback,args):\n if (coroutines.iscoroutine(callback)\n or coroutines.iscoroutinefunction(callback)):\n raise TypeError(\"coroutines cannot be used with call_soon()\")\n self._check_closed()\n handle=events.Handle(callback,args,self)\n if handle._source_traceback:\n del handle._source_traceback[-1]\n self._ready.append(handle)\n return handle\n \n def _check_thread(self):\n ''\n\n\n\n\n\n\n \n if self._thread_id is None :\n return\n thread_id=threading.get_ident()\n if thread_id !=self._thread_id:\n raise RuntimeError(\n \"Non-thread-safe operation invoked on an event loop other \"\n \"than the current one\")\n \n def call_soon_threadsafe(self,callback,*args):\n ''\n handle=self._call_soon(callback,args)\n if handle._source_traceback:\n del handle._source_traceback[-1]\n self._write_to_self()\n return handle\n \n def run_in_executor(self,executor,callback,*args):\n if (coroutines.iscoroutine(callback)\n or coroutines.iscoroutinefunction(callback)):\n raise TypeError(\"coroutines cannot be used with run_in_executor()\")\n self._check_closed()\n if isinstance(callback,events.Handle):\n assert not args\n assert not isinstance(callback,events.TimerHandle)\n if callback._cancelled:\n f=futures.Future(loop=self)\n f.set_result(None )\n return f\n 
callback,args=callback._callback,callback._args\n if executor is None :\n executor=self._default_executor\n if executor is None :\n executor=concurrent.futures.ThreadPoolExecutor(_MAX_WORKERS)\n self._default_executor=executor\n return futures.wrap_future(executor.submit(callback,*args),loop=self)\n \n def set_default_executor(self,executor):\n self._default_executor=executor\n \n def _getaddrinfo_debug(self,host,port,family,type,proto,flags):\n msg=[\"%s:%r\"%(host,port)]\n if family:\n msg.append('family=%r'%family)\n if type:\n msg.append('type=%r'%type)\n if proto:\n msg.append('proto=%r'%proto)\n if flags:\n msg.append('flags=%r'%flags)\n msg=', '.join(msg)\n logger.debug('Get address info %s',msg)\n \n t0=self.time()\n addrinfo=socket.getaddrinfo(host,port,family,type,proto,flags)\n dt=self.time()-t0\n \n msg=('Getting address info %s took %.3f ms: %r'\n %(msg,dt *1e3,addrinfo))\n if dt >=self.slow_callback_duration:\n logger.info(msg)\n else :\n logger.debug(msg)\n return addrinfo\n \n def getaddrinfo(self,host,port,*,\n family=0,type=0,proto=0,flags=0):\n if self._debug:\n return self.run_in_executor(None ,self._getaddrinfo_debug,\n host,port,family,type,proto,flags)\n else :\n return self.run_in_executor(None ,socket.getaddrinfo,\n host,port,family,type,proto,flags)\n \n def getnameinfo(self,sockaddr,flags=0):\n return self.run_in_executor(None ,socket.getnameinfo,sockaddr,flags)\n \n @coroutine\n def create_connection(self,protocol_factory,host=None ,port=None ,*,\n ssl=None ,family=0,proto=0,flags=0,sock=None ,\n local_addr=None ,server_hostname=None ):\n ''\n\n\n\n\n\n\n\n\n\n \n if server_hostname is not None and not ssl:\n raise ValueError('server_hostname is only meaningful with ssl')\n \n if server_hostname is None and ssl:\n \n \n \n \n \n \n \n \n \n \n if not host:\n raise ValueError('You must set server_hostname '\n 'when using ssl without a host')\n server_hostname=host\n \n if host is not None or port is not None :\n if sock is not None :\n raise ValueError(\n 'host/port and sock can not be specified at the same time')\n \n f1=self.getaddrinfo(\n host,port,family=family,\n type=socket.SOCK_STREAM,proto=proto,flags=flags)\n fs=[f1]\n if local_addr is not None :\n f2=self.getaddrinfo(\n *local_addr,family=family,\n type=socket.SOCK_STREAM,proto=proto,flags=flags)\n fs.append(f2)\n else :\n f2=None\n \n yield from tasks.wait(fs,loop=self)\n \n infos=f1.result()\n if not infos:\n raise OSError('getaddrinfo() returned empty list')\n if f2 is not None :\n laddr_infos=f2.result()\n if not laddr_infos:\n raise OSError('getaddrinfo() returned empty list')\n \n exceptions=[]\n for family,type,proto,cname,address in infos:\n try :\n sock=socket.socket(family=family,type=type,proto=proto)\n sock.setblocking(False )\n if f2 is not None :\n for _,_,_,_,laddr in laddr_infos:\n try :\n sock.bind(laddr)\n break\n except OSError as exc:\n exc=OSError(\n exc.errno,'error while '\n 'attempting to bind on address '\n '{!r}: {}'.format(\n laddr,exc.strerror.lower()))\n exceptions.append(exc)\n else :\n sock.close()\n sock=None\n continue\n if self._debug:\n logger.debug(\"connect %r to %r\",sock,address)\n yield from self.sock_connect(sock,address)\n except OSError as exc:\n if sock is not None :\n sock.close()\n exceptions.append(exc)\n except :\n if sock is not None :\n sock.close()\n raise\n else :\n break\n else :\n if len(exceptions)==1:\n raise exceptions[0]\n else :\n \n model=str(exceptions[0])\n if all(str(exc)==model for exc in exceptions):\n raise exceptions[0]\n \n \n raise 
OSError('Multiple exceptions: {}'.format(\n ', '.join(str(exc)for exc in exceptions)))\n \n elif sock is None :\n raise ValueError(\n 'host and port was not specified and no sock specified')\n \n sock.setblocking(False )\n \n transport,protocol=yield from self._create_connection_transport(\n sock,protocol_factory,ssl,server_hostname)\n if self._debug:\n \n \n sock=transport.get_extra_info('socket')\n logger.debug(\"%r connected to %s:%r: (%r, %r)\",\n sock,host,port,transport,protocol)\n return transport,protocol\n \n @coroutine\n def _create_connection_transport(self,sock,protocol_factory,ssl,\n server_hostname):\n protocol=protocol_factory()\n waiter=futures.Future(loop=self)\n if ssl:\n sslcontext=None if isinstance(ssl,bool)else ssl\n transport=self._make_ssl_transport(\n sock,protocol,sslcontext,waiter,\n server_side=False ,server_hostname=server_hostname)\n else :\n transport=self._make_socket_transport(sock,protocol,waiter)\n \n try :\n yield from waiter\n except :\n transport.close()\n raise\n \n return transport,protocol\n \n @coroutine\n def create_datagram_endpoint(self,protocol_factory,\n local_addr=None ,remote_addr=None ,*,\n family=0,proto=0,flags=0):\n ''\n if not (local_addr or remote_addr):\n if family ==0:\n raise ValueError('unexpected address family')\n addr_pairs_info=(((family,proto),(None ,None )),)\n else :\n \n addr_infos=collections.OrderedDict()\n for idx,addr in ((0,local_addr),(1,remote_addr)):\n if addr is not None :\n assert isinstance(addr,tuple)and len(addr)==2,(\n '2-tuple is expected')\n \n infos=yield from self.getaddrinfo(\n *addr,family=family,type=socket.SOCK_DGRAM,\n proto=proto,flags=flags)\n if not infos:\n raise OSError('getaddrinfo() returned empty list')\n \n for fam,_,pro,_,address in infos:\n key=(fam,pro)\n if key not in addr_infos:\n addr_infos[key]=[None ,None ]\n addr_infos[key][idx]=address\n \n \n addr_pairs_info=[\n (key,addr_pair)for key,addr_pair in addr_infos.items()\n if not ((local_addr and addr_pair[0]is None )or\n (remote_addr and addr_pair[1]is None ))]\n \n if not addr_pairs_info:\n raise ValueError('can not get address information')\n \n exceptions=[]\n \n for ((family,proto),\n (local_address,remote_address))in addr_pairs_info:\n sock=None\n r_addr=None\n try :\n sock=socket.socket(\n family=family,type=socket.SOCK_DGRAM,proto=proto)\n sock.setsockopt(socket.SOL_SOCKET,socket.SO_REUSEADDR,1)\n sock.setblocking(False )\n \n if local_addr:\n sock.bind(local_address)\n if remote_addr:\n yield from self.sock_connect(sock,remote_address)\n r_addr=remote_address\n except OSError as exc:\n if sock is not None :\n sock.close()\n exceptions.append(exc)\n except :\n if sock is not None :\n sock.close()\n raise\n else :\n break\n else :\n raise exceptions[0]\n \n protocol=protocol_factory()\n waiter=futures.Future(loop=self)\n transport=self._make_datagram_transport(sock,protocol,r_addr,\n waiter)\n if self._debug:\n if local_addr:\n logger.info(\"Datagram endpoint local_addr=%r remote_addr=%r \"\n \"created: (%r, %r)\",\n local_addr,remote_addr,transport,protocol)\n else :\n logger.debug(\"Datagram endpoint remote_addr=%r created: \"\n \"(%r, %r)\",\n remote_addr,transport,protocol)\n \n try :\n yield from waiter\n except :\n transport.close()\n raise\n \n return transport,protocol\n \n @coroutine\n def create_server(self,protocol_factory,host=None ,port=None ,\n *,\n family=socket.AF_UNSPEC,\n flags=socket.AI_PASSIVE,\n sock=None ,\n backlog=100,\n ssl=None ,\n reuse_address=None ):\n ''\n\n\n\n\n \n if isinstance(ssl,bool):\n raise 
TypeError('ssl argument must be an SSLContext or None')\n if host is not None or port is not None :\n if sock is not None :\n raise ValueError(\n 'host/port and sock can not be specified at the same time')\n \n AF_INET6=getattr(socket,'AF_INET6',0)\n if reuse_address is None :\n reuse_address=os.name =='posix'and sys.platform !='cygwin'\n sockets=[]\n if host =='':\n host=None\n \n infos=yield from self.getaddrinfo(\n host,port,family=family,\n type=socket.SOCK_STREAM,proto=0,flags=flags)\n if not infos:\n raise OSError('getaddrinfo() returned empty list')\n \n completed=False\n try :\n for res in infos:\n af,socktype,proto,canonname,sa=res\n try :\n sock=socket.socket(af,socktype,proto)\n except socket.error:\n \n if self._debug:\n logger.warning('create_server() failed to create '\n 'socket.socket(%r, %r, %r)',\n af,socktype,proto,exc_info=True )\n continue\n sockets.append(sock)\n if reuse_address:\n sock.setsockopt(socket.SOL_SOCKET,socket.SO_REUSEADDR,\n True )\n \n \n \n if af ==AF_INET6 and hasattr(socket,'IPPROTO_IPV6'):\n sock.setsockopt(socket.IPPROTO_IPV6,\n socket.IPV6_V6ONLY,\n True )\n try :\n sock.bind(sa)\n except OSError as err:\n raise OSError(err.errno,'error while attempting '\n 'to bind on address %r: %s'\n %(sa,err.strerror.lower()))\n completed=True\n finally :\n if not completed:\n for sock in sockets:\n sock.close()\n else :\n if sock is None :\n raise ValueError('Neither host/port nor sock were specified')\n sockets=[sock]\n \n server=Server(self,sockets)\n for sock in sockets:\n sock.listen(backlog)\n sock.setblocking(False )\n self._start_serving(protocol_factory,sock,ssl,server)\n if self._debug:\n logger.info(\"%r is serving\",server)\n return server\n \n @coroutine\n def connect_read_pipe(self,protocol_factory,pipe):\n protocol=protocol_factory()\n waiter=futures.Future(loop=self)\n transport=self._make_read_pipe_transport(pipe,protocol,waiter)\n \n try :\n yield from waiter\n except :\n transport.close()\n raise\n \n if self._debug:\n logger.debug('Read pipe %r connected: (%r, %r)',\n pipe.fileno(),transport,protocol)\n return transport,protocol\n \n @coroutine\n def connect_write_pipe(self,protocol_factory,pipe):\n protocol=protocol_factory()\n waiter=futures.Future(loop=self)\n transport=self._make_write_pipe_transport(pipe,protocol,waiter)\n \n try :\n yield from waiter\n except :\n transport.close()\n raise\n \n if self._debug:\n logger.debug('Write pipe %r connected: (%r, %r)',\n pipe.fileno(),transport,protocol)\n return transport,protocol\n \n def _log_subprocess(self,msg,stdin,stdout,stderr):\n info=[msg]\n if stdin is not None :\n info.append('stdin=%s'%_format_pipe(stdin))\n if stdout is not None and stderr ==subprocess.STDOUT:\n info.append('stdout=stderr=%s'%_format_pipe(stdout))\n else :\n if stdout is not None :\n info.append('stdout=%s'%_format_pipe(stdout))\n if stderr is not None :\n info.append('stderr=%s'%_format_pipe(stderr))\n logger.debug(' '.join(info))\n \n @coroutine\n def subprocess_shell(self,protocol_factory,cmd,*,stdin=subprocess.PIPE,\n stdout=subprocess.PIPE,stderr=subprocess.PIPE,\n universal_newlines=False ,shell=True ,bufsize=0,\n **kwargs):\n if not isinstance(cmd,(bytes,str)):\n raise ValueError(\"cmd must be a string\")\n if universal_newlines:\n raise ValueError(\"universal_newlines must be False\")\n if not shell:\n raise ValueError(\"shell must be True\")\n if bufsize !=0:\n raise ValueError(\"bufsize must be 0\")\n protocol=protocol_factory()\n if self._debug:\n \n \n debug_log='run shell command %r'%cmd\n 
self._log_subprocess(debug_log,stdin,stdout,stderr)\n transport=yield from self._make_subprocess_transport(\n protocol,cmd,True ,stdin,stdout,stderr,bufsize,**kwargs)\n if self._debug:\n logger.info('%s: %r'%(debug_log,transport))\n return transport,protocol\n \n @coroutine\n def subprocess_exec(self,protocol_factory,program,*args,\n stdin=subprocess.PIPE,stdout=subprocess.PIPE,\n stderr=subprocess.PIPE,universal_newlines=False ,\n shell=False ,bufsize=0,**kwargs):\n if universal_newlines:\n raise ValueError(\"universal_newlines must be False\")\n if shell:\n raise ValueError(\"shell must be False\")\n if bufsize !=0:\n raise ValueError(\"bufsize must be 0\")\n popen_args=(program,)+args\n for arg in popen_args:\n if not isinstance(arg,(str,bytes)):\n raise TypeError(\"program arguments must be \"\n \"a bytes or text string, not %s\"\n %type(arg).__name__)\n protocol=protocol_factory()\n if self._debug:\n \n \n debug_log='execute program %r'%program\n self._log_subprocess(debug_log,stdin,stdout,stderr)\n transport=yield from self._make_subprocess_transport(\n protocol,popen_args,False ,stdin,stdout,stderr,\n bufsize,**kwargs)\n if self._debug:\n logger.info('%s: %r'%(debug_log,transport))\n return transport,protocol\n \n def set_exception_handler(self,handler):\n ''\n\n\n\n\n\n\n\n\n\n \n if handler is not None and not callable(handler):\n raise TypeError('A callable object or None is expected, '\n 'got {!r}'.format(handler))\n self._exception_handler=handler\n \n def default_exception_handler(self,context):\n ''\n\n\n\n\n\n\n\n \n message=context.get('message')\n if not message:\n message='Unhandled exception in event loop'\n \n exception=context.get('exception')\n if exception is not None :\n \n \n exc_info=(type(exception),exception)\n else :\n exc_info=False\n \n if ('source_traceback'not in context\n and self._current_handle is not None\n and self._current_handle._source_traceback):\n context['handle_traceback']=self._current_handle._source_traceback\n \n log_lines=[message]\n for key in sorted(context):\n if key in {'message','exception'}:\n continue\n value=context[key]\n if key =='source_traceback':\n tb=''.join(traceback.format_list(value))\n value='Object created at (most recent call last):\\n'\n value +=tb.rstrip()\n elif key =='handle_traceback':\n tb=''.join(traceback.format_list(value))\n value='Handle created at (most recent call last):\\n'\n value +=tb.rstrip()\n else :\n value=repr(value)\n log_lines.append('{}: {}'.format(key,value))\n \n logger.error('\\n'.join(log_lines),exc_info=exc_info)\n \n def call_exception_handler(self,context):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if self._exception_handler is None :\n try :\n self.default_exception_handler(context)\n except Exception:\n \n \n \n logger.error('Exception in default exception handler',\n exc_info=True )\n else :\n try :\n self._exception_handler(self,context)\n except Exception as exc:\n \n try :\n \n self.default_exception_handler({\n 'message':'Unhandled error in exception handler',\n 'exception':exc,\n 'context':context,\n })\n except Exception:\n \n \n logger.error('Exception in default exception handler '\n 'while handling an unexpected error '\n 'in custom exception handler',\n exc_info=True )\n \n def _add_callback(self,handle):\n ''\n assert isinstance(handle,events.Handle),'A Handle is required here'\n if handle._cancelled:\n return\n assert not isinstance(handle,events.TimerHandle)\n self._ready.append(handle)\n \n def _add_callback_signalsafe(self,handle):\n ''\n self._add_callback(handle)\n 
self._write_to_self()\n \n def _timer_handle_cancelled(self,handle):\n ''\n if handle._scheduled:\n self._timer_cancelled_count +=1\n \n def _run_once(self):\n ''\n\n\n\n\n \n \n sched_count=len(self._scheduled)\n if (sched_count >_MIN_SCHEDULED_TIMER_HANDLES and\n self._timer_cancelled_count /sched_count >\n _MIN_CANCELLED_TIMER_HANDLES_FRACTION):\n \n \n new_scheduled=[]\n for handle in self._scheduled:\n if handle._cancelled:\n handle._scheduled=False\n else :\n new_scheduled.append(handle)\n \n heapq.heapify(new_scheduled)\n self._scheduled=new_scheduled\n self._timer_cancelled_count=0\n else :\n \n while self._scheduled and self._scheduled[0]._cancelled:\n self._timer_cancelled_count -=1\n handle=heapq.heappop(self._scheduled)\n handle._scheduled=False\n \n timeout=None\n if self._ready:\n timeout=0\n elif self._scheduled:\n \n when=self._scheduled[0]._when\n timeout=max(0,when -self.time())\n \n if self._debug and timeout !=0:\n t0=self.time()\n event_list=self._selector.select(timeout)\n dt=self.time()-t0\n if dt >=1.0:\n level=logging.INFO\n else :\n level=logging.DEBUG\n nevent=len(event_list)\n if timeout is None :\n logger.log(level,'poll took %.3f ms: %s events',\n dt *1e3,nevent)\n elif nevent:\n logger.log(level,\n 'poll %.3f ms took %.3f ms: %s events',\n timeout *1e3,dt *1e3,nevent)\n elif dt >=1.0:\n logger.log(level,\n 'poll %.3f ms took %.3f ms: timeout',\n timeout *1e3,dt *1e3)\n else :\n event_list=self._selector.select(timeout)\n self._process_events(event_list)\n \n \n end_time=self.time()+self._clock_resolution\n while self._scheduled:\n handle=self._scheduled[0]\n if handle._when >=end_time:\n break\n handle=heapq.heappop(self._scheduled)\n handle._scheduled=False\n self._ready.append(handle)\n \n \n \n \n \n \n \n ntodo=len(self._ready)\n for i in range(ntodo):\n handle=self._ready.popleft()\n if handle._cancelled:\n continue\n if self._debug:\n try :\n self._current_handle=handle\n t0=self.time()\n handle._run()\n dt=self.time()-t0\n if dt >=self.slow_callback_duration:\n logger.warning('Executing %s took %.3f seconds',\n _format_handle(handle),dt)\n finally :\n self._current_handle=None\n else :\n handle._run()\n handle=None\n \n def get_debug(self):\n return self._debug\n \n def set_debug(self,enabled):\n self._debug=enabled\n"], "encodings.cp866": [".py", "''\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_map)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False ):\n return codecs.charmap_encode(input,self.errors,encoding_map)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False ):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='cp866',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n 
\ndecoding_map=codecs.make_identity_dict(range(256))\ndecoding_map.update({\n0x0080:0x0410,\n0x0081:0x0411,\n0x0082:0x0412,\n0x0083:0x0413,\n0x0084:0x0414,\n0x0085:0x0415,\n0x0086:0x0416,\n0x0087:0x0417,\n0x0088:0x0418,\n0x0089:0x0419,\n0x008a:0x041a,\n0x008b:0x041b,\n0x008c:0x041c,\n0x008d:0x041d,\n0x008e:0x041e,\n0x008f:0x041f,\n0x0090:0x0420,\n0x0091:0x0421,\n0x0092:0x0422,\n0x0093:0x0423,\n0x0094:0x0424,\n0x0095:0x0425,\n0x0096:0x0426,\n0x0097:0x0427,\n0x0098:0x0428,\n0x0099:0x0429,\n0x009a:0x042a,\n0x009b:0x042b,\n0x009c:0x042c,\n0x009d:0x042d,\n0x009e:0x042e,\n0x009f:0x042f,\n0x00a0:0x0430,\n0x00a1:0x0431,\n0x00a2:0x0432,\n0x00a3:0x0433,\n0x00a4:0x0434,\n0x00a5:0x0435,\n0x00a6:0x0436,\n0x00a7:0x0437,\n0x00a8:0x0438,\n0x00a9:0x0439,\n0x00aa:0x043a,\n0x00ab:0x043b,\n0x00ac:0x043c,\n0x00ad:0x043d,\n0x00ae:0x043e,\n0x00af:0x043f,\n0x00b0:0x2591,\n0x00b1:0x2592,\n0x00b2:0x2593,\n0x00b3:0x2502,\n0x00b4:0x2524,\n0x00b5:0x2561,\n0x00b6:0x2562,\n0x00b7:0x2556,\n0x00b8:0x2555,\n0x00b9:0x2563,\n0x00ba:0x2551,\n0x00bb:0x2557,\n0x00bc:0x255d,\n0x00bd:0x255c,\n0x00be:0x255b,\n0x00bf:0x2510,\n0x00c0:0x2514,\n0x00c1:0x2534,\n0x00c2:0x252c,\n0x00c3:0x251c,\n0x00c4:0x2500,\n0x00c5:0x253c,\n0x00c6:0x255e,\n0x00c7:0x255f,\n0x00c8:0x255a,\n0x00c9:0x2554,\n0x00ca:0x2569,\n0x00cb:0x2566,\n0x00cc:0x2560,\n0x00cd:0x2550,\n0x00ce:0x256c,\n0x00cf:0x2567,\n0x00d0:0x2568,\n0x00d1:0x2564,\n0x00d2:0x2565,\n0x00d3:0x2559,\n0x00d4:0x2558,\n0x00d5:0x2552,\n0x00d6:0x2553,\n0x00d7:0x256b,\n0x00d8:0x256a,\n0x00d9:0x2518,\n0x00da:0x250c,\n0x00db:0x2588,\n0x00dc:0x2584,\n0x00dd:0x258c,\n0x00de:0x2590,\n0x00df:0x2580,\n0x00e0:0x0440,\n0x00e1:0x0441,\n0x00e2:0x0442,\n0x00e3:0x0443,\n0x00e4:0x0444,\n0x00e5:0x0445,\n0x00e6:0x0446,\n0x00e7:0x0447,\n0x00e8:0x0448,\n0x00e9:0x0449,\n0x00ea:0x044a,\n0x00eb:0x044b,\n0x00ec:0x044c,\n0x00ed:0x044d,\n0x00ee:0x044e,\n0x00ef:0x044f,\n0x00f0:0x0401,\n0x00f1:0x0451,\n0x00f2:0x0404,\n0x00f3:0x0454,\n0x00f4:0x0407,\n0x00f5:0x0457,\n0x00f6:0x040e,\n0x00f7:0x045e,\n0x00f8:0x00b0,\n0x00f9:0x2219,\n0x00fa:0x00b7,\n0x00fb:0x221a,\n0x00fc:0x2116,\n0x00fd:0x00a4,\n0x00fe:0x25a0,\n0x00ff:0x00a0,\n})\n\n\n\ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' 
'\n'!'\n'\"'\n'#'\n'$'\n'%'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\u0410'\n'\\u0411'\n'\\u0412'\n'\\u0413'\n'\\u0414'\n'\\u0415'\n'\\u0416'\n'\\u0417'\n'\\u0418'\n'\\u0419'\n'\\u041a'\n'\\u041b'\n'\\u041c'\n'\\u041d'\n'\\u041e'\n'\\u041f'\n'\\u0420'\n'\\u0421'\n'\\u0422'\n'\\u0423'\n'\\u0424'\n'\\u0425'\n'\\u0426'\n'\\u0427'\n'\\u0428'\n'\\u0429'\n'\\u042a'\n'\\u042b'\n'\\u042c'\n'\\u042d'\n'\\u042e'\n'\\u042f'\n'\\u0430'\n'\\u0431'\n'\\u0432'\n'\\u0433'\n'\\u0434'\n'\\u0435'\n'\\u0436'\n'\\u0437'\n'\\u0438'\n'\\u0439'\n'\\u043a'\n'\\u043b'\n'\\u043c'\n'\\u043d'\n'\\u043e'\n'\\u043f'\n'\\u2591'\n'\\u2592'\n'\\u2593'\n'\\u2502'\n'\\u2524'\n'\\u2561'\n'\\u2562'\n'\\u2556'\n'\\u2555'\n'\\u2563'\n'\\u2551'\n'\\u2557'\n'\\u255d'\n'\\u255c'\n'\\u255b'\n'\\u2510'\n'\\u2514'\n'\\u2534'\n'\\u252c'\n'\\u251c'\n'\\u2500'\n'\\u253c'\n'\\u255e'\n'\\u255f'\n'\\u255a'\n'\\u2554'\n'\\u2569'\n'\\u2566'\n'\\u2560'\n'\\u2550'\n'\\u256c'\n'\\u2567'\n'\\u2568'\n'\\u2564'\n'\\u2565'\n'\\u2559'\n'\\u2558'\n'\\u2552'\n'\\u2553'\n'\\u256b'\n'\\u256a'\n'\\u2518'\n'\\u250c'\n'\\u2588'\n'\\u2584'\n'\\u258c'\n'\\u2590'\n'\\u2580'\n'\\u0440'\n'\\u0441'\n'\\u0442'\n'\\u0443'\n'\\u0444'\n'\\u0445'\n'\\u0446'\n'\\u0447'\n'\\u0448'\n'\\u0449'\n'\\u044a'\n'\\u044b'\n'\\u044c'\n'\\u044d'\n'\\u044e'\n'\\u044f'\n'\\u0401'\n'\\u0451'\n'\\u0404'\n'\\u0454'\n'\\u0407'\n'\\u0457'\n'\\u040e'\n'\\u045e'\n'\\xb0'\n'\\u2219'\n'\\xb7'\n'\\u221a'\n'\\u2116'\n'\\xa4'\n'\\u25a0'\n'\\xa0'\n)\n\n\n\nencoding_map={\n0x0000:0x0000,\n0x0001:0x0001,\n0x0002:0x0002,\n0x0003:0x0003,\n0x0004:0x0004,\n0x0005:0x0005,\n0x0006:0x0006,\n0x0007:0x0007,\n0x0008:0x0008,\n0x0009:0x0009,\n0x000a:0x000a,\n0x000b:0x000b,\n0x000c:0x000c,\n0x000d:0x000d,\n0x000e:0x000e,\n0x000f:0x000f,\n0x0010:0x0010,\n0x0011:0x0011,\n0x0012:0x0012,\n0x0013:0x0013,\n0x0014:0x0014,\n0x0015:0x0015,\n0x0016:0x0016,\n0x0017:0x0017,\n0x0018:0x0018,\n0x0019:0x0019,\n0x001a:0x001a,\n0x001b:0x001b,\n0x001c:0x001c,\n0x001d:0x001d,\n0x001e:0x001e,\n0x001f:0x001f,\n0x0020:0x0020,\n0x0021:0x0021,\n0x0022:0x0022,\n0x0023:0x0023,\n0x0024:0x0024,\n0x0025:0x0025,\n0x0026:0x0026,\n0x0027:0x0027,\n0x0028:0x0028,\n0x0029:0x0029,\n0x002a:0x002a,\n0x002b:0x002b,\n0x002c:0x002c,\n0x002d:0x002d,\n0x002e:0x002e,\n0x002f:0x002f,\n0x0030:0x0030,\n0x0031:0x0031,\n0x0032:0x0032,\n0x0033:0x0033,\n0x0034:0x0034,\n0x0035:0x0035,\n0x0036:0x0036,\n0x0037:0x0037,\n0x0038:0x0038,\n0x0039:0x0039,\n0x003a:0x003a,\n0x003b:0x003b,\n0x003c:0x003c,\n0x003d:0x003d,\n0x003e:0x003e,\n0x003f:0x003f,\n0x0040:0x0040,\n0x0041:0x0041,\n0x0042:0x0042,\n0x0043:0x0043,\n0x0044:0x0044,\n0x0045:0x0045,\n0x0046:0x0046,\n0x0047:0x0047,\n0x0048:0x0048,\n0x0049:0x0049,\n0x004a:0x004a,\n0x004b:0x004b,\n0x004c:0x004c,\n0x004d:0x004d,\n0x004e:0x004e,\n0x004f:0x004f,\n0x0050:0x0050,\n0x0051:0x0051,\n0x0052:0x0052,\n0x0053:0x0053,\n0x0054:0x0054,\n0x0055:0x0055,\n0x0056:0x0056,\n0x0057:0x0057,\n0x0058:0x0058,\n0x0059:0x0059,\n0x005a:0x005a,\n0x005b:0x005b,\n0x005c:0x005c,\n0x005d:0x005d,\n0x005e:0x005e,\n0x005f:0x005f,\n0x0060:0x0060,\n0x0061:0x0061,\n0x0062:0x0062,\n0x0063:0x0063,\n0x0064:0x0064,\n0x0065:0x0065,\n0x0066:0x0
066,\n0x0067:0x0067,\n0x0068:0x0068,\n0x0069:0x0069,\n0x006a:0x006a,\n0x006b:0x006b,\n0x006c:0x006c,\n0x006d:0x006d,\n0x006e:0x006e,\n0x006f:0x006f,\n0x0070:0x0070,\n0x0071:0x0071,\n0x0072:0x0072,\n0x0073:0x0073,\n0x0074:0x0074,\n0x0075:0x0075,\n0x0076:0x0076,\n0x0077:0x0077,\n0x0078:0x0078,\n0x0079:0x0079,\n0x007a:0x007a,\n0x007b:0x007b,\n0x007c:0x007c,\n0x007d:0x007d,\n0x007e:0x007e,\n0x007f:0x007f,\n0x00a0:0x00ff,\n0x00a4:0x00fd,\n0x00b0:0x00f8,\n0x00b7:0x00fa,\n0x0401:0x00f0,\n0x0404:0x00f2,\n0x0407:0x00f4,\n0x040e:0x00f6,\n0x0410:0x0080,\n0x0411:0x0081,\n0x0412:0x0082,\n0x0413:0x0083,\n0x0414:0x0084,\n0x0415:0x0085,\n0x0416:0x0086,\n0x0417:0x0087,\n0x0418:0x0088,\n0x0419:0x0089,\n0x041a:0x008a,\n0x041b:0x008b,\n0x041c:0x008c,\n0x041d:0x008d,\n0x041e:0x008e,\n0x041f:0x008f,\n0x0420:0x0090,\n0x0421:0x0091,\n0x0422:0x0092,\n0x0423:0x0093,\n0x0424:0x0094,\n0x0425:0x0095,\n0x0426:0x0096,\n0x0427:0x0097,\n0x0428:0x0098,\n0x0429:0x0099,\n0x042a:0x009a,\n0x042b:0x009b,\n0x042c:0x009c,\n0x042d:0x009d,\n0x042e:0x009e,\n0x042f:0x009f,\n0x0430:0x00a0,\n0x0431:0x00a1,\n0x0432:0x00a2,\n0x0433:0x00a3,\n0x0434:0x00a4,\n0x0435:0x00a5,\n0x0436:0x00a6,\n0x0437:0x00a7,\n0x0438:0x00a8,\n0x0439:0x00a9,\n0x043a:0x00aa,\n0x043b:0x00ab,\n0x043c:0x00ac,\n0x043d:0x00ad,\n0x043e:0x00ae,\n0x043f:0x00af,\n0x0440:0x00e0,\n0x0441:0x00e1,\n0x0442:0x00e2,\n0x0443:0x00e3,\n0x0444:0x00e4,\n0x0445:0x00e5,\n0x0446:0x00e6,\n0x0447:0x00e7,\n0x0448:0x00e8,\n0x0449:0x00e9,\n0x044a:0x00ea,\n0x044b:0x00eb,\n0x044c:0x00ec,\n0x044d:0x00ed,\n0x044e:0x00ee,\n0x044f:0x00ef,\n0x0451:0x00f1,\n0x0454:0x00f3,\n0x0457:0x00f5,\n0x045e:0x00f7,\n0x2116:0x00fc,\n0x2219:0x00f9,\n0x221a:0x00fb,\n0x2500:0x00c4,\n0x2502:0x00b3,\n0x250c:0x00da,\n0x2510:0x00bf,\n0x2514:0x00c0,\n0x2518:0x00d9,\n0x251c:0x00c3,\n0x2524:0x00b4,\n0x252c:0x00c2,\n0x2534:0x00c1,\n0x253c:0x00c5,\n0x2550:0x00cd,\n0x2551:0x00ba,\n0x2552:0x00d5,\n0x2553:0x00d6,\n0x2554:0x00c9,\n0x2555:0x00b8,\n0x2556:0x00b7,\n0x2557:0x00bb,\n0x2558:0x00d4,\n0x2559:0x00d3,\n0x255a:0x00c8,\n0x255b:0x00be,\n0x255c:0x00bd,\n0x255d:0x00bc,\n0x255e:0x00c6,\n0x255f:0x00c7,\n0x2560:0x00cc,\n0x2561:0x00b5,\n0x2562:0x00b6,\n0x2563:0x00b9,\n0x2564:0x00d1,\n0x2565:0x00d2,\n0x2566:0x00cb,\n0x2567:0x00cf,\n0x2568:0x00d0,\n0x2569:0x00ca,\n0x256a:0x00d8,\n0x256b:0x00d7,\n0x256c:0x00ce,\n0x2580:0x00df,\n0x2584:0x00dc,\n0x2588:0x00db,\n0x258c:0x00dd,\n0x2590:0x00de,\n0x2591:0x00b0,\n0x2592:0x00b1,\n0x2593:0x00b2,\n0x25a0:0x00fe,\n}\n"], "xml.sax.xmlreader": [".py", "''\n\n\nfrom .import handler\n\nfrom ._exceptions import SAXNotSupportedException,SAXNotRecognizedException\n\n\n\n\nclass XMLReader:\n ''\n\n\n\n\n\n\n\n\n\n \n \n def __init__(self):\n self._cont_handler=handler.ContentHandler()\n self._dtd_handler=handler.DTDHandler()\n self._ent_handler=handler.EntityResolver()\n self._err_handler=handler.ErrorHandler()\n \n def parse(self,source):\n ''\n raise NotImplementedError(\"This method must be implemented!\")\n \n def getContentHandler(self):\n ''\n return self._cont_handler\n \n def setContentHandler(self,handler):\n ''\n self._cont_handler=handler\n \n def getDTDHandler(self):\n ''\n return self._dtd_handler\n \n def setDTDHandler(self,handler):\n ''\n self._dtd_handler=handler\n \n def getEntityResolver(self):\n ''\n return self._ent_handler\n \n def setEntityResolver(self,resolver):\n ''\n self._ent_handler=resolver\n \n def getErrorHandler(self):\n ''\n return self._err_handler\n \n def setErrorHandler(self,handler):\n ''\n self._err_handler=handler\n \n def setLocale(self,locale):\n ''\n\n\n\n\n 
\n raise SAXNotSupportedException(\"Locale support not implemented\")\n \n def getFeature(self,name):\n ''\n raise SAXNotRecognizedException(\"Feature '%s' not recognized\"%name)\n \n def setFeature(self,name,state):\n ''\n raise SAXNotRecognizedException(\"Feature '%s' not recognized\"%name)\n \n def getProperty(self,name):\n ''\n raise SAXNotRecognizedException(\"Property '%s' not recognized\"%name)\n \n def setProperty(self,name,value):\n ''\n raise SAXNotRecognizedException(\"Property '%s' not recognized\"%name)\n \nclass IncrementalParser(XMLReader):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n def __init__(self,bufsize=2 **16):\n self._bufsize=bufsize\n XMLReader.__init__(self)\n \n def parse(self,source):\n from .import saxutils\n source=saxutils.prepare_input_source(source)\n \n self.prepareParser(source)\n file=source.getByteStream()\n buffer=file.read(self._bufsize)\n while buffer:\n self.feed(buffer)\n buffer=file.read(self._bufsize)\n self.close()\n \n def feed(self,data):\n ''\n\n\n\n\n \n raise NotImplementedError(\"This method must be implemented!\")\n \n def prepareParser(self,source):\n ''\n \n raise NotImplementedError(\"prepareParser must be overridden!\")\n \n def close(self):\n ''\n\n\n\n\n\n\n\n\n \n raise NotImplementedError(\"This method must be implemented!\")\n \n def reset(self):\n ''\n\n\n \n raise NotImplementedError(\"This method must be implemented!\")\n \n \n \nclass Locator:\n ''\n\n\n \n \n def getColumnNumber(self):\n ''\n return -1\n \n def getLineNumber(self):\n ''\n return -1\n \n def getPublicId(self):\n ''\n return None\n \n def getSystemId(self):\n ''\n return None\n \n \n \nclass InputSource:\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n def __init__(self,system_id=None ):\n self.__system_id=system_id\n self.__public_id=None\n self.__encoding=None\n self.__bytefile=None\n self.__charfile=None\n \n def setPublicId(self,public_id):\n ''\n self.__public_id=public_id\n \n def getPublicId(self):\n ''\n return self.__public_id\n \n def setSystemId(self,system_id):\n ''\n self.__system_id=system_id\n \n def getSystemId(self):\n ''\n return self.__system_id\n \n def setEncoding(self,encoding):\n ''\n\n\n\n\n\n \n self.__encoding=encoding\n \n def getEncoding(self):\n ''\n return self.__encoding\n \n def setByteStream(self,bytefile):\n ''\n\n\n\n\n\n\n\n\n \n self.__bytefile=bytefile\n \n def getByteStream(self):\n ''\n\n\n \n return self.__bytefile\n \n def setCharacterStream(self,charfile):\n ''\n\n\n\n\n\n \n self.__charfile=charfile\n \n def getCharacterStream(self):\n ''\n return self.__charfile\n \n \n \nclass AttributesImpl:\n\n def __init__(self,attrs):\n ''\n\n \n self._attrs=attrs\n \n def getLength(self):\n return len(self._attrs)\n \n def getType(self,name):\n return\"CDATA\"\n \n def getValue(self,name):\n return self._attrs[name]\n \n def getValueByQName(self,name):\n return self._attrs[name]\n \n def getNameByQName(self,name):\n if name not in self._attrs:\n raise KeyError(name)\n return name\n \n def getQNameByName(self,name):\n if name not in self._attrs:\n raise KeyError(name)\n return name\n \n def getNames(self):\n return list(self._attrs.keys())\n \n def getQNames(self):\n return list(self._attrs.keys())\n \n def __len__(self):\n return len(self._attrs)\n \n def __getitem__(self,name):\n return self._attrs[name]\n \n def keys(self):\n return list(self._attrs.keys())\n \n def __contains__(self,name):\n return name in self._attrs\n \n def get(self,name,alternative=None ):\n return self._attrs.get(name,alternative)\n \n def copy(self):\n return 
self.__class__(self._attrs)\n \n def items(self):\n return list(self._attrs.items())\n \n def values(self):\n return list(self._attrs.values())\n \n \n \nclass AttributesNSImpl(AttributesImpl):\n\n def __init__(self,attrs,qnames):\n ''\n\n\n \n self._attrs=attrs\n self._qnames=qnames\n \n def getValueByQName(self,name):\n for (nsname,qname)in self._qnames.items():\n if qname ==name:\n return self._attrs[nsname]\n \n raise KeyError(name)\n \n def getNameByQName(self,name):\n for (nsname,qname)in self._qnames.items():\n if qname ==name:\n return nsname\n \n raise KeyError(name)\n \n def getQNameByName(self,name):\n return self._qnames[name]\n \n def getQNames(self):\n return list(self._qnames.values())\n \n def copy(self):\n return self.__class__(self._attrs,self._qnames)\n \n \ndef _test():\n XMLReader()\n IncrementalParser()\n Locator()\n \nif __name__ ==\"__main__\":\n _test()\n"], "asyncio.locks": [".py", "''\n\n__all__=['Lock','Event','Condition','Semaphore','BoundedSemaphore']\n\nimport collections\n\nfrom .import events\nfrom .import futures\nfrom .coroutines import coroutine\n\n\nclass _ContextManager:\n ''\n\n\n\n\n\n\n\n\n\n\n\n \n \n def __init__(self,lock):\n self._lock=lock\n \n def __enter__(self):\n \n \n return None\n \n def __exit__(self,*args):\n try :\n self._lock.release()\n finally :\n self._lock=None\n \n \nclass Lock:\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n def __init__(self,*,loop=None ):\n self._waiters=collections.deque()\n self._locked=False\n if loop is not None :\n self._loop=loop\n else :\n self._loop=events.get_event_loop()\n \n def __repr__(self):\n res=super().__repr__()\n extra='locked'if self._locked else'unlocked'\n if self._waiters:\n extra='{},waiters:{}'.format(extra,len(self._waiters))\n return'<{} [{}]>'.format(res[1:-1],extra)\n \n def locked(self):\n ''\n return self._locked\n \n @coroutine\n def acquire(self):\n ''\n\n\n\n \n if not self._waiters and not self._locked:\n self._locked=True\n return True\n \n fut=futures.Future(loop=self._loop)\n self._waiters.append(fut)\n try :\n yield from fut\n self._locked=True\n return True\n finally :\n self._waiters.remove(fut)\n \n def release(self):\n ''\n\n\n\n\n\n\n\n\n \n if self._locked:\n self._locked=False\n \n for fut in self._waiters:\n if not fut.done():\n fut.set_result(True )\n break\n else :\n raise RuntimeError('Lock is not acquired.')\n \n def __enter__(self):\n raise RuntimeError(\n '\"yield from\" should be used as context manager expression')\n \n def __exit__(self,*args):\n \n \n pass\n \n def __iter__(self):\n \n \n \n \n \n \n \n \n \n \n \n \n yield from self.acquire()\n return _ContextManager(self)\n \n \nclass Event:\n ''\n\n\n\n\n\n \n \n def __init__(self,*,loop=None ):\n self._waiters=collections.deque()\n self._value=False\n if loop is not None :\n self._loop=loop\n else :\n self._loop=events.get_event_loop()\n \n def __repr__(self):\n res=super().__repr__()\n extra='set'if self._value else'unset'\n if self._waiters:\n extra='{},waiters:{}'.format(extra,len(self._waiters))\n return'<{} [{}]>'.format(res[1:-1],extra)\n \n def is_set(self):\n ''\n return self._value\n \n def set(self):\n ''\n\n\n \n if not self._value:\n self._value=True\n \n for fut in self._waiters:\n if not fut.done():\n fut.set_result(True )\n \n def clear(self):\n ''\n\n \n self._value=False\n \n @coroutine\n def wait(self):\n ''\n\n\n\n\n \n if self._value:\n return True\n \n fut=futures.Future(loop=self._loop)\n 
self._waiters.append(fut)\n try :\n yield from fut\n return True\n finally :\n self._waiters.remove(fut)\n \n \nclass Condition:\n ''\n\n\n\n\n\n\n \n \n def __init__(self,lock=None ,*,loop=None ):\n if loop is not None :\n self._loop=loop\n else :\n self._loop=events.get_event_loop()\n \n if lock is None :\n lock=Lock(loop=self._loop)\n elif lock._loop is not self._loop:\n raise ValueError(\"loop argument must agree with lock\")\n \n self._lock=lock\n \n self.locked=lock.locked\n self.acquire=lock.acquire\n self.release=lock.release\n \n self._waiters=collections.deque()\n \n def __repr__(self):\n res=super().__repr__()\n extra='locked'if self.locked()else'unlocked'\n if self._waiters:\n extra='{},waiters:{}'.format(extra,len(self._waiters))\n return'<{} [{}]>'.format(res[1:-1],extra)\n \n @coroutine\n def wait(self):\n ''\n\n\n\n\n\n\n\n\n \n if not self.locked():\n raise RuntimeError('cannot wait on un-acquired lock')\n \n self.release()\n try :\n fut=futures.Future(loop=self._loop)\n self._waiters.append(fut)\n try :\n yield from fut\n return True\n finally :\n self._waiters.remove(fut)\n \n finally :\n yield from self.acquire()\n \n @coroutine\n def wait_for(self,predicate):\n ''\n\n\n\n\n \n result=predicate()\n while not result:\n yield from self.wait()\n result=predicate()\n return result\n \n def notify(self,n=1):\n ''\n\n\n\n\n\n\n\n\n\n \n if not self.locked():\n raise RuntimeError('cannot notify on un-acquired lock')\n \n idx=0\n for fut in self._waiters:\n if idx >=n:\n break\n \n if not fut.done():\n idx +=1\n fut.set_result(False )\n \n def notify_all(self):\n ''\n\n\n\n \n self.notify(len(self._waiters))\n \n def __enter__(self):\n raise RuntimeError(\n '\"yield from\" should be used as context manager expression')\n \n def __exit__(self,*args):\n pass\n \n def __iter__(self):\n \n yield from self.acquire()\n return _ContextManager(self)\n \n \nclass Semaphore:\n ''\n\n\n\n\n\n\n\n\n\n\n\n \n \n def __init__(self,value=1,*,loop=None ):\n if value <0:\n raise ValueError(\"Semaphore initial value must be >= 0\")\n self._value=value\n self._waiters=collections.deque()\n if loop is not None :\n self._loop=loop\n else :\n self._loop=events.get_event_loop()\n \n def __repr__(self):\n res=super().__repr__()\n extra='locked'if self.locked()else'unlocked,value:{}'.format(\n self._value)\n if self._waiters:\n extra='{},waiters:{}'.format(extra,len(self._waiters))\n return'<{} [{}]>'.format(res[1:-1],extra)\n \n def locked(self):\n ''\n return self._value ==0\n \n @coroutine\n def acquire(self):\n ''\n\n\n\n\n\n\n \n if not self._waiters and self._value >0:\n self._value -=1\n return True\n \n fut=futures.Future(loop=self._loop)\n self._waiters.append(fut)\n try :\n yield from fut\n self._value -=1\n return True\n finally :\n self._waiters.remove(fut)\n \n def release(self):\n ''\n\n\n \n self._value +=1\n for waiter in self._waiters:\n if not waiter.done():\n waiter.set_result(True )\n break\n \n def __enter__(self):\n raise RuntimeError(\n '\"yield from\" should be used as context manager expression')\n \n def __exit__(self,*args):\n pass\n \n def __iter__(self):\n \n yield from self.acquire()\n return _ContextManager(self)\n \n \nclass BoundedSemaphore(Semaphore):\n ''\n\n\n\n \n \n def __init__(self,value=1,*,loop=None ):\n self._bound_value=value\n super().__init__(value,loop=loop)\n \n def release(self):\n if self._value >=self._bound_value:\n raise ValueError('BoundedSemaphore released too many times')\n super().release()\n"], "threading": [".py", "''\n\nimport sys as 
_sys\nimport _thread\n\nfrom time import sleep as _sleep\ntry :\n from time import monotonic as _time\nexcept ImportError:\n from time import time as _time\nfrom traceback import format_exc as _format_exc\nfrom _weakrefset import WeakSet\n\n\n\n\n\n\n\n\n\n\n\n__all__=['active_count','Condition','current_thread','enumerate','Event',\n'Lock','RLock','Semaphore','BoundedSemaphore','Thread','Barrier',\n'Timer','ThreadError','setprofile','settrace','local','stack_size']\n\n\n_start_new_thread=_thread.start_new_thread\n_allocate_lock=_thread.allocate_lock\nget_ident=_thread.get_ident\nThreadError=_thread.error\ntry :\n _CRLock=_thread.RLock\nexcept AttributeError:\n _CRLock=None\nTIMEOUT_MAX=_thread.TIMEOUT_MAX\ndel _thread\n\n\n\n\n_profile_hook=None\n_trace_hook=None\n\ndef setprofile(func):\n ''\n\n\n\n\n \n global _profile_hook\n _profile_hook=func\n \ndef settrace(func):\n ''\n\n\n\n\n \n global _trace_hook\n _trace_hook=func\n \n \n \nLock=_allocate_lock\n\ndef RLock(*args,**kwargs):\n ''\n\n\n\n\n\n\n \n if _CRLock is None :\n  return _PyRLock(*args,**kwargs)\n return _CRLock(*args,**kwargs)\n \nclass _RLock:\n ''\n\n\n\n\n\n\n \n \n def __init__(self):\n  self._block=_allocate_lock()\n  self._owner=None\n  self._count=0\n \n def __repr__(self):\n  owner=self._owner\n  try :\n   owner=_active[owner].name\n  except KeyError:\n   pass\n  return\"<%s owner=%r count=%d>\"%(\n  self.__class__.__name__,owner,self._count)\n \n def acquire(self,blocking=True ,timeout=-1):\n  ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n  me=get_ident()\n  if self._owner ==me:\n   self._count=self._count+1\n   return 1\n  rc=self._block.acquire(blocking,timeout)\n  if rc:\n   self._owner=me\n   self._count=1\n  return rc\n \n __enter__=acquire\n \n def release(self):\n  ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n  if self._owner !=get_ident():\n   raise RuntimeError(\"cannot release un-acquired lock\")\n  self._count=count=self._count -1\n  if not count:\n   self._owner=None\n   self._block.release()\n \n def __exit__(self,t,v,tb):\n  self.release()\n \n \n \n def _acquire_restore(self,state):\n  self._block.acquire()\n  self._count,self._owner=state\n \n def _release_save(self):\n  if self._count ==0:\n   raise RuntimeError(\"cannot release un-acquired lock\")\n  count=self._count\n  self._count=0\n  owner=self._owner\n  self._owner=None\n  self._block.release()\n  return (count,owner)\n \n def _is_owned(self):\n  return self._owner ==get_ident()\n \n_PyRLock=_RLock\n\n\nclass Condition:\n ''\n\n\n\n\n\n\n\n\n \n \n def __init__(self,lock=None ):\n  if lock is None :\n   lock=RLock()\n  self._lock=lock\n \n  self.acquire=lock.acquire\n  self.release=lock.release\n \n \n \n  try :\n   self._release_save=lock._release_save\n  except AttributeError:\n   pass\n  try :\n   self._acquire_restore=lock._acquire_restore\n  except AttributeError:\n   pass\n  try :\n   self._is_owned=lock._is_owned\n  except AttributeError:\n   pass\n  self._waiters=[]\n \n def __enter__(self):\n  return self._lock.__enter__()\n \n def __exit__(self,*args):\n  return self._lock.__exit__(*args)\n \n def __repr__(self):\n  return\"<Condition(%s, %d)>\"%(self._lock,len(self._waiters))\n \n def _release_save(self):\n  self._lock.release()\n \n def _acquire_restore(self,x):\n  self._lock.acquire()\n \n def _is_owned(self):\n \n \n  if self._lock.acquire(0):\n   self._lock.release()\n   return False\n  else :\n   return True\n \n def wait(self,timeout=None ):\n  ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n  if not self._is_owned():\n   raise RuntimeError(\"cannot wait on un-acquired lock\")\n  waiter=_allocate_lock()\n  waiter.acquire()\n  
self._waiters.append(waiter)\n saved_state=self._release_save()\n try :\n if timeout is None :\n waiter.acquire()\n gotit=True\n else :\n if timeout >0:\n gotit=waiter.acquire(True ,timeout)\n else :\n gotit=waiter.acquire(False )\n if not gotit:\n try :\n self._waiters.remove(waiter)\n except ValueError:\n pass\n return gotit\n finally :\n self._acquire_restore(saved_state)\n \n def wait_for(self,predicate,timeout=None ):\n ''\n\n\n\n\n\n \n endtime=None\n waittime=timeout\n result=predicate()\n while not result:\n if waittime is not None :\n if endtime is None :\n endtime=_time()+waittime\n else :\n waittime=endtime -_time()\n if waittime <=0:\n break\n self.wait(waittime)\n result=predicate()\n return result\n \n def notify(self,n=1):\n ''\n\n\n\n\n\n\n\n \n if not self._is_owned():\n raise RuntimeError(\"cannot notify on un-acquired lock\")\n __waiters=self._waiters\n waiters=__waiters[:n]\n if not waiters:\n return\n for waiter in waiters:\n waiter.release()\n try :\n __waiters.remove(waiter)\n except ValueError:\n pass\n \n def notify_all(self):\n ''\n\n\n\n\n \n self.notify(len(self._waiters))\n \n notifyAll=notify_all\n \n \nclass Semaphore:\n ''\n\n\n\n\n\n\n \n \n \n \n def __init__(self,value=1):\n if value <0:\n raise ValueError(\"semaphore initial value must be >= 0\")\n self._cond=Condition(Lock())\n self._value=value\n \n def acquire(self,blocking=True ,timeout=None ):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if not blocking and timeout is not None :\n raise ValueError(\"can't specify timeout for non-blocking acquire\")\n rc=False\n endtime=None\n with self._cond:\n while self._value ==0:\n if not blocking:\n break\n if timeout is not None :\n if endtime is None :\n endtime=_time()+timeout\n else :\n timeout=endtime -_time()\n if timeout <=0:\n break\n self._cond.wait(timeout)\n else :\n self._value=self._value -1\n rc=True\n return rc\n \n __enter__=acquire\n \n def release(self):\n ''\n\n\n\n\n \n with self._cond:\n self._value=self._value+1\n self._cond.notify()\n \n def __exit__(self,t,v,tb):\n self.release()\n \n \nclass BoundedSemaphore(Semaphore):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n def __init__(self,value=1):\n Semaphore.__init__(self,value)\n self._initial_value=value\n \n def release(self):\n ''\n\n\n\n\n\n\n\n \n with self._cond:\n if self._value >=self._initial_value:\n raise ValueError(\"Semaphore released too many times\")\n self._value +=1\n self._cond.notify()\n \n \nclass Event:\n ''\n\n\n\n\n\n \n \n \n \n def __init__(self):\n self._cond=Condition(Lock())\n self._flag=False\n \n def _reset_internal_locks(self):\n \n self._cond.__init__()\n \n def is_set(self):\n ''\n return self._flag\n \n isSet=is_set\n \n def set(self):\n ''\n\n\n\n\n \n self._cond.acquire()\n try :\n self._flag=True\n self._cond.notify_all()\n finally :\n self._cond.release()\n \n def clear(self):\n ''\n\n\n\n\n \n self._cond.acquire()\n try :\n self._flag=False\n finally :\n self._cond.release()\n \n def wait(self,timeout=None ):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n \n self._cond.acquire()\n try :\n signaled=self._flag\n if not signaled:\n signaled=self._cond.wait(timeout)\n return signaled\n finally :\n self._cond.release()\n \n \n \n \n \n \n \n \n \n \n \n \n \nclass Barrier:\n ''\n\n\n\n\n\n \n \n def __init__(self,parties,action=None ,timeout=None ):\n ''\n\n\n\n\n\n\n \n self._cond=Condition(Lock())\n self._action=action\n self._timeout=timeout\n self._parties=parties\n self._state=0\n self._count=0\n \n def wait(self,timeout=None ):\n ''\n\n\n\n\n\n\n \n if timeout is 
None :\n timeout=self._timeout\n with self._cond:\n self._enter()\n index=self._count\n self._count +=1\n try :\n if index+1 ==self._parties:\n \n self._release()\n else :\n \n self._wait(timeout)\n return index\n finally :\n self._count -=1\n \n self._exit()\n \n \n \n def _enter(self):\n while self._state in (-1,1):\n \n self._cond.wait()\n \n if self._state <0:\n raise BrokenBarrierError\n assert self._state ==0\n \n \n \n def _release(self):\n try :\n if self._action:\n self._action()\n \n self._state=1\n self._cond.notify_all()\n except :\n \n self._break()\n raise\n \n \n \n def _wait(self,timeout):\n if not self._cond.wait_for(lambda :self._state !=0,timeout):\n \n self._break()\n raise BrokenBarrierError\n if self._state <0:\n raise BrokenBarrierError\n assert self._state ==1\n \n \n \n def _exit(self):\n if self._count ==0:\n if self._state in (-1,1):\n \n self._state=0\n self._cond.notify_all()\n \n def reset(self):\n ''\n\n\n\n\n \n with self._cond:\n if self._count >0:\n if self._state ==0:\n \n self._state=-1\n elif self._state ==-2:\n \n \n self._state=-1\n else :\n self._state=0\n self._cond.notify_all()\n \n def abort(self):\n ''\n\n\n\n\n \n with self._cond:\n self._break()\n \n def _break(self):\n \n \n self._state=-2\n self._cond.notify_all()\n \n @property\n def parties(self):\n ''\n return self._parties\n \n @property\n def n_waiting(self):\n ''\n \n \n if self._state ==0:\n return self._count\n return 0\n \n @property\n def broken(self):\n ''\n return self._state ==-2\n \n \nclass BrokenBarrierError(RuntimeError):\n pass\n \n \n \n_counter=0\ndef _newname(template=\"Thread-%d\"):\n global _counter\n _counter=_counter+1\n return template %_counter\n \n \n_active_limbo_lock=_allocate_lock()\n_active={}\n_limbo={}\n\n\n_dangling=WeakSet()\n\n\n\nclass Thread:\n ''\n\n\n\n\n\n \n \n __initialized=False\n \n \n \n \n __exc_info=_sys.exc_info\n \n \n \n \n def __init__(self,group=None ,target=None ,name=None ,\n args=(),kwargs=None ,*,daemon=None ):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n assert group is None ,\"group argument must be None for now\"\n if kwargs is None :\n kwargs={}\n self._target=target\n self._name=str(name or _newname())\n self._args=args\n self._kwargs=kwargs\n if daemon is not None :\n self._daemonic=daemon\n else :\n self._daemonic=current_thread().daemon\n self._ident=None\n self._started=Event()\n self._stopped=False\n self._block=Condition(Lock())\n self._initialized=True\n \n \n self._stderr=_sys.stderr\n _dangling.add(self)\n \n def _reset_internal_locks(self):\n \n \n if hasattr(self,'_block'):\n self._block.__init__()\n self._started._reset_internal_locks()\n \n def __repr__(self):\n assert self._initialized,\"Thread.__init__() was not called\"\n status=\"initial\"\n if self._started.is_set():\n status=\"started\"\n if self._stopped:\n status=\"stopped\"\n if self._daemonic:\n status +=\" daemon\"\n if self._ident is not None :\n status +=\" %s\"%self._ident\n return\"<%s(%s, %s)>\"%(self.__class__.__name__,self._name,status)\n \n def start(self):\n ''\n\n\n\n\n\n\n\n \n if not self._initialized:\n raise RuntimeError(\"thread.__init__() not called\")\n \n if self._started.is_set():\n raise RuntimeError(\"threads can only be started once\")\n with _active_limbo_lock:\n _limbo[self]=self\n try :\n _start_new_thread(self._bootstrap,())\n except Exception:\n with _active_limbo_lock:\n del _limbo[self]\n raise\n self._started.wait()\n \n def run(self):\n ''\n\n\n\n\n\n\n \n try :\n if self._target:\n 
self._target(*self._args,**self._kwargs)\n finally :\n \n \n del self._target,self._args,self._kwargs\n \n def _bootstrap(self):\n \n \n \n \n \n \n \n \n \n \n \n \n try :\n self._bootstrap_inner()\n except :\n if self._daemonic and _sys is None :\n return\n raise\n \n def _set_ident(self):\n self._ident=get_ident()\n \n def _bootstrap_inner(self):\n try :\n self._set_ident()\n self._started.set()\n with _active_limbo_lock:\n _active[self._ident]=self\n del _limbo[self]\n \n if _trace_hook:\n _sys.settrace(_trace_hook)\n if _profile_hook:\n _sys.setprofile(_profile_hook)\n \n try :\n self.run()\n except SystemExit:\n pass\n except :\n \n \n \n \n if _sys:\n _sys.stderr.write(\"Exception in thread %s:\\n%s\\n\"%\n (self.name,_format_exc()))\n else :\n \n \n \n exc_type,exc_value,exc_tb=self._exc_info()\n try :\n print((\n \"Exception in thread \"+self.name+\n \" (most likely raised during interpreter shutdown):\"),file=self._stderr)\n print((\n \"Traceback (most recent call last):\"),file=self._stderr)\n while exc_tb:\n print((\n ' File \"%s\", line %s, in %s'%\n (exc_tb.tb_frame.f_code.co_filename,\n exc_tb.tb_lineno,\n exc_tb.tb_frame.f_code.co_name)),file=self._stderr)\n exc_tb=exc_tb.tb_next\n print((\"%s: %s\"%(exc_type,exc_value)),file=self._stderr)\n \n \n finally :\n del exc_type,exc_value,exc_tb\n finally :\n \n \n \n \n \n pass\n finally :\n with _active_limbo_lock:\n self._stop()\n try :\n \n \n del _active[get_ident()]\n except :\n pass\n \n def _stop(self):\n self._block.acquire()\n self._stopped=True\n self._block.notify_all()\n self._block.release()\n \n def _delete(self):\n ''\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n try :\n with _active_limbo_lock:\n del _active[get_ident()]\n \n \n \n \n except KeyError:\n if'dummy_threading'not in _sys.modules:\n raise\n \n def join(self,timeout=None ):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if not self._initialized:\n raise RuntimeError(\"Thread.__init__() not called\")\n if not self._started.is_set():\n raise RuntimeError(\"cannot join thread before it is started\")\n if self is current_thread():\n raise RuntimeError(\"cannot join current thread\")\n \n self._block.acquire()\n try :\n if timeout is None :\n while not self._stopped:\n self._block.wait()\n else :\n deadline=_time()+timeout\n while not self._stopped:\n delay=deadline -_time()\n if delay <=0:\n break\n self._block.wait(delay)\n finally :\n self._block.release()\n \n @property\n def name(self):\n ''\n\n\n\n\n \n assert self._initialized,\"Thread.__init__() not called\"\n return self._name\n \n @name.setter\n def name(self,name):\n assert self._initialized,\"Thread.__init__() not called\"\n self._name=str(name)\n \n @property\n def ident(self):\n ''\n\n\n\n\n\n \n assert self._initialized,\"Thread.__init__() not called\"\n return self._ident\n \n def is_alive(self):\n ''\n\n\n\n\n\n \n assert self._initialized,\"Thread.__init__() not called\"\n return self._started.is_set()and not self._stopped\n \n isAlive=is_alive\n \n @property\n def daemon(self):\n ''\n\n\n\n\n\n\n\n\n\n \n assert self._initialized,\"Thread.__init__() not called\"\n return self._daemonic\n \n @daemon.setter\n def daemon(self,daemonic):\n if not self._initialized:\n raise RuntimeError(\"Thread.__init__() not called\")\n if self._started.is_set():\n raise RuntimeError(\"cannot set daemon status of active thread\");\n self._daemonic=daemonic\n \n def isDaemon(self):\n return self.daemon\n \n def setDaemon(self,daemonic):\n self.daemon=daemonic\n \n def 
getName(self):\n return self.name\n \n def setName(self,name):\n self.name=name\n \n \n \nclass Timer(Thread):\n ''\n\n\n\n\n\n \n \n def __init__(self,interval,function,args=None ,kwargs=None ):\n Thread.__init__(self)\n self.interval=interval\n self.function=function\n self.args=args if args is not None else []\n self.kwargs=kwargs if kwargs is not None else {}\n self.finished=Event()\n \n def cancel(self):\n ''\n self.finished.set()\n \n def run(self):\n self.finished.wait(self.interval)\n if not self.finished.is_set():\n self.function(*self.args,**self.kwargs)\n self.finished.set()\n \n \n \n \nclass _MainThread(Thread):\n\n def __init__(self):\n Thread.__init__(self,name=\"MainThread\",daemon=False )\n self._started.set()\n self._set_ident()\n with _active_limbo_lock:\n _active[self._ident]=self\n \n def _exitfunc(self):\n self._stop()\n t=_pickSomeNonDaemonThread()\n while t:\n t.join()\n t=_pickSomeNonDaemonThread()\n self._delete()\n \ndef _pickSomeNonDaemonThread():\n for t in enumerate():\n if not t.daemon and t.is_alive():\n return t\n return None\n \n \n \n \n \n \n \n \n \n \nclass _DummyThread(Thread):\n\n def __init__(self):\n Thread.__init__(self,name=_newname(\"Dummy-%d\"),daemon=True )\n \n \n \n \n del self._block\n \n self._started.set()\n self._set_ident()\n with _active_limbo_lock:\n _active[self._ident]=self\n \n def _stop(self):\n pass\n \n def join(self,timeout=None ):\n assert False ,\"cannot join a dummy thread\"\n \n \n \n \ndef current_thread():\n ''\n\n\n\n\n \n try :\n return _active[get_ident()]\n except KeyError:\n return _DummyThread()\n \ncurrentThread=current_thread\n\ndef active_count():\n ''\n\n\n\n\n \n with _active_limbo_lock:\n return len(_active)+len(_limbo)\n \nactiveCount=active_count\n\ndef _enumerate():\n\n return list(_active.values())+list(_limbo.values())\n \ndef enumerate():\n ''\n\n\n\n\n\n \n with _active_limbo_lock:\n return list(_active.values())+list(_limbo.values())\n \nfrom _thread import stack_size\n\n\n\n\n\n_shutdown=_MainThread()._exitfunc\n\n\n\n\ntry :\n from _thread import _local as local\nexcept ImportError:\n from _threading_local import local\n \n \ndef _after_fork():\n\n\n\n\n\n\n global _active_limbo_lock\n _active_limbo_lock=_allocate_lock()\n \n \n new_active={}\n current=current_thread()\n with _active_limbo_lock:\n for thread in _enumerate():\n \n \n thread._reset_internal_locks()\n if thread is current:\n \n \n ident=get_ident()\n thread._ident=ident\n new_active[ident]=thread\n else :\n \n thread._stop()\n \n _limbo.clear()\n _active.clear()\n _active.update(new_active)\n assert len(_active)==1\n"], "encodings.cp852": [".py", "''\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_map)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False ):\n return codecs.charmap_encode(input,self.errors,encoding_map)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False ):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='cp852',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n 
incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n \ndecoding_map=codecs.make_identity_dict(range(256))\ndecoding_map.update({\n0x0080:0x00c7,\n0x0081:0x00fc,\n0x0082:0x00e9,\n0x0083:0x00e2,\n0x0084:0x00e4,\n0x0085:0x016f,\n0x0086:0x0107,\n0x0087:0x00e7,\n0x0088:0x0142,\n0x0089:0x00eb,\n0x008a:0x0150,\n0x008b:0x0151,\n0x008c:0x00ee,\n0x008d:0x0179,\n0x008e:0x00c4,\n0x008f:0x0106,\n0x0090:0x00c9,\n0x0091:0x0139,\n0x0092:0x013a,\n0x0093:0x00f4,\n0x0094:0x00f6,\n0x0095:0x013d,\n0x0096:0x013e,\n0x0097:0x015a,\n0x0098:0x015b,\n0x0099:0x00d6,\n0x009a:0x00dc,\n0x009b:0x0164,\n0x009c:0x0165,\n0x009d:0x0141,\n0x009e:0x00d7,\n0x009f:0x010d,\n0x00a0:0x00e1,\n0x00a1:0x00ed,\n0x00a2:0x00f3,\n0x00a3:0x00fa,\n0x00a4:0x0104,\n0x00a5:0x0105,\n0x00a6:0x017d,\n0x00a7:0x017e,\n0x00a8:0x0118,\n0x00a9:0x0119,\n0x00aa:0x00ac,\n0x00ab:0x017a,\n0x00ac:0x010c,\n0x00ad:0x015f,\n0x00ae:0x00ab,\n0x00af:0x00bb,\n0x00b0:0x2591,\n0x00b1:0x2592,\n0x00b2:0x2593,\n0x00b3:0x2502,\n0x00b4:0x2524,\n0x00b5:0x00c1,\n0x00b6:0x00c2,\n0x00b7:0x011a,\n0x00b8:0x015e,\n0x00b9:0x2563,\n0x00ba:0x2551,\n0x00bb:0x2557,\n0x00bc:0x255d,\n0x00bd:0x017b,\n0x00be:0x017c,\n0x00bf:0x2510,\n0x00c0:0x2514,\n0x00c1:0x2534,\n0x00c2:0x252c,\n0x00c3:0x251c,\n0x00c4:0x2500,\n0x00c5:0x253c,\n0x00c6:0x0102,\n0x00c7:0x0103,\n0x00c8:0x255a,\n0x00c9:0x2554,\n0x00ca:0x2569,\n0x00cb:0x2566,\n0x00cc:0x2560,\n0x00cd:0x2550,\n0x00ce:0x256c,\n0x00cf:0x00a4,\n0x00d0:0x0111,\n0x00d1:0x0110,\n0x00d2:0x010e,\n0x00d3:0x00cb,\n0x00d4:0x010f,\n0x00d5:0x0147,\n0x00d6:0x00cd,\n0x00d7:0x00ce,\n0x00d8:0x011b,\n0x00d9:0x2518,\n0x00da:0x250c,\n0x00db:0x2588,\n0x00dc:0x2584,\n0x00dd:0x0162,\n0x00de:0x016e,\n0x00df:0x2580,\n0x00e0:0x00d3,\n0x00e1:0x00df,\n0x00e2:0x00d4,\n0x00e3:0x0143,\n0x00e4:0x0144,\n0x00e5:0x0148,\n0x00e6:0x0160,\n0x00e7:0x0161,\n0x00e8:0x0154,\n0x00e9:0x00da,\n0x00ea:0x0155,\n0x00eb:0x0170,\n0x00ec:0x00fd,\n0x00ed:0x00dd,\n0x00ee:0x0163,\n0x00ef:0x00b4,\n0x00f0:0x00ad,\n0x00f1:0x02dd,\n0x00f2:0x02db,\n0x00f3:0x02c7,\n0x00f4:0x02d8,\n0x00f5:0x00a7,\n0x00f6:0x00f7,\n0x00f7:0x00b8,\n0x00f8:0x00b0,\n0x00f9:0x00a8,\n0x00fa:0x02d9,\n0x00fb:0x0171,\n0x00fc:0x0158,\n0x00fd:0x0159,\n0x00fe:0x25a0,\n0x00ff:0x00a0,\n})\n\n\n\ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' 
'\n'!'\n'\"'\n'#'\n'$'\n'%'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\xc7'\n'\\xfc'\n'\\xe9'\n'\\xe2'\n'\\xe4'\n'\\u016f'\n'\\u0107'\n'\\xe7'\n'\\u0142'\n'\\xeb'\n'\\u0150'\n'\\u0151'\n'\\xee'\n'\\u0179'\n'\\xc4'\n'\\u0106'\n'\\xc9'\n'\\u0139'\n'\\u013a'\n'\\xf4'\n'\\xf6'\n'\\u013d'\n'\\u013e'\n'\\u015a'\n'\\u015b'\n'\\xd6'\n'\\xdc'\n'\\u0164'\n'\\u0165'\n'\\u0141'\n'\\xd7'\n'\\u010d'\n'\\xe1'\n'\\xed'\n'\\xf3'\n'\\xfa'\n'\\u0104'\n'\\u0105'\n'\\u017d'\n'\\u017e'\n'\\u0118'\n'\\u0119'\n'\\xac'\n'\\u017a'\n'\\u010c'\n'\\u015f'\n'\\xab'\n'\\xbb'\n'\\u2591'\n'\\u2592'\n'\\u2593'\n'\\u2502'\n'\\u2524'\n'\\xc1'\n'\\xc2'\n'\\u011a'\n'\\u015e'\n'\\u2563'\n'\\u2551'\n'\\u2557'\n'\\u255d'\n'\\u017b'\n'\\u017c'\n'\\u2510'\n'\\u2514'\n'\\u2534'\n'\\u252c'\n'\\u251c'\n'\\u2500'\n'\\u253c'\n'\\u0102'\n'\\u0103'\n'\\u255a'\n'\\u2554'\n'\\u2569'\n'\\u2566'\n'\\u2560'\n'\\u2550'\n'\\u256c'\n'\\xa4'\n'\\u0111'\n'\\u0110'\n'\\u010e'\n'\\xcb'\n'\\u010f'\n'\\u0147'\n'\\xcd'\n'\\xce'\n'\\u011b'\n'\\u2518'\n'\\u250c'\n'\\u2588'\n'\\u2584'\n'\\u0162'\n'\\u016e'\n'\\u2580'\n'\\xd3'\n'\\xdf'\n'\\xd4'\n'\\u0143'\n'\\u0144'\n'\\u0148'\n'\\u0160'\n'\\u0161'\n'\\u0154'\n'\\xda'\n'\\u0155'\n'\\u0170'\n'\\xfd'\n'\\xdd'\n'\\u0163'\n'\\xb4'\n'\\xad'\n'\\u02dd'\n'\\u02db'\n'\\u02c7'\n'\\u02d8'\n'\\xa7'\n'\\xf7'\n'\\xb8'\n'\\xb0'\n'\\xa8'\n'\\u02d9'\n'\\u0171'\n'\\u0158'\n'\\u0159'\n'\\u25a0'\n'\\xa0'\n)\n\n\n\nencoding_map={\n0x0000:0x0000,\n0x0001:0x0001,\n0x0002:0x0002,\n0x0003:0x0003,\n0x0004:0x0004,\n0x0005:0x0005,\n0x0006:0x0006,\n0x0007:0x0007,\n0x0008:0x0008,\n0x0009:0x0009,\n0x000a:0x000a,\n0x000b:0x000b,\n0x000c:0x000c,\n0x000d:0x000d,\n0x000e:0x000e,\n0x000f:0x000f,\n0x0010:0x0010,\n0x0011:0x0011,\n0x0012:0x0012,\n0x0013:0x0013,\n0x0014:0x0014,\n0x0015:0x0015,\n0x0016:0x0016,\n0x0017:0x0017,\n0x0018:0x0018,\n0x0019:0x0019,\n0x001a:0x001a,\n0x001b:0x001b,\n0x001c:0x001c,\n0x001d:0x001d,\n0x001e:0x001e,\n0x001f:0x001f,\n0x0020:0x0020,\n0x0021:0x0021,\n0x0022:0x0022,\n0x0023:0x0023,\n0x0024:0x0024,\n0x0025:0x0025,\n0x0026:0x0026,\n0x0027:0x0027,\n0x0028:0x0028,\n0x0029:0x0029,\n0x002a:0x002a,\n0x002b:0x002b,\n0x002c:0x002c,\n0x002d:0x002d,\n0x002e:0x002e,\n0x002f:0x002f,\n0x0030:0x0030,\n0x0031:0x0031,\n0x0032:0x0032,\n0x0033:0x0033,\n0x0034:0x0034,\n0x0035:0x0035,\n0x0036:0x0036,\n0x0037:0x0037,\n0x0038:0x0038,\n0x0039:0x0039,\n0x003a:0x003a,\n0x003b:0x003b,\n0x003c:0x003c,\n0x003d:0x003d,\n0x003e:0x003e,\n0x003f:0x003f,\n0x0040:0x0040,\n0x0041:0x0041,\n0x0042:0x0042,\n0x0043:0x0043,\n0x0044:0x0044,\n0x0045:0x0045,\n0x0046:0x0046,\n0x0047:0x0047,\n0x0048:0x0048,\n0x0049:0x0049,\n0x004a:0x004a,\n0x004b:0x004b,\n0x004c:0x004c,\n0x004d:0x004d,\n0x004e:0x004e,\n0x004f:0x004f,\n0x0050:0x0050,\n0x0051:0x0051,\n0x0052:0x0052,\n0x0053:0x0053,\n0x0054:0x0054,\n0x0055:0x0055,\n0x0056:0x0056,\n0x0057:0x0057,\n0x0058:0x0058,\n0x0059:0x0059,\n0x005a:0x005a,\n0x005b:0x005b,\n0x005c:0x005c,\n0x005d:0x005d,\n0x005e:0x005e,\n0x005f:0x005f,\n0x0060:0x0060,\n0x0061:0x0061,\n0x0062:0x0062,\n0x0063:0x0063,\n0x0064:0x0064,\n0x0065:0x0065,\n0x0066:0x0066,\n0x0067:0x0067,\n0x0068:0x0068,\n0x0069:0x0069,\n0x006a:0x006a,\n0x006b
:0x006b,\n0x006c:0x006c,\n0x006d:0x006d,\n0x006e:0x006e,\n0x006f:0x006f,\n0x0070:0x0070,\n0x0071:0x0071,\n0x0072:0x0072,\n0x0073:0x0073,\n0x0074:0x0074,\n0x0075:0x0075,\n0x0076:0x0076,\n0x0077:0x0077,\n0x0078:0x0078,\n0x0079:0x0079,\n0x007a:0x007a,\n0x007b:0x007b,\n0x007c:0x007c,\n0x007d:0x007d,\n0x007e:0x007e,\n0x007f:0x007f,\n0x00a0:0x00ff,\n0x00a4:0x00cf,\n0x00a7:0x00f5,\n0x00a8:0x00f9,\n0x00ab:0x00ae,\n0x00ac:0x00aa,\n0x00ad:0x00f0,\n0x00b0:0x00f8,\n0x00b4:0x00ef,\n0x00b8:0x00f7,\n0x00bb:0x00af,\n0x00c1:0x00b5,\n0x00c2:0x00b6,\n0x00c4:0x008e,\n0x00c7:0x0080,\n0x00c9:0x0090,\n0x00cb:0x00d3,\n0x00cd:0x00d6,\n0x00ce:0x00d7,\n0x00d3:0x00e0,\n0x00d4:0x00e2,\n0x00d6:0x0099,\n0x00d7:0x009e,\n0x00da:0x00e9,\n0x00dc:0x009a,\n0x00dd:0x00ed,\n0x00df:0x00e1,\n0x00e1:0x00a0,\n0x00e2:0x0083,\n0x00e4:0x0084,\n0x00e7:0x0087,\n0x00e9:0x0082,\n0x00eb:0x0089,\n0x00ed:0x00a1,\n0x00ee:0x008c,\n0x00f3:0x00a2,\n0x00f4:0x0093,\n0x00f6:0x0094,\n0x00f7:0x00f6,\n0x00fa:0x00a3,\n0x00fc:0x0081,\n0x00fd:0x00ec,\n0x0102:0x00c6,\n0x0103:0x00c7,\n0x0104:0x00a4,\n0x0105:0x00a5,\n0x0106:0x008f,\n0x0107:0x0086,\n0x010c:0x00ac,\n0x010d:0x009f,\n0x010e:0x00d2,\n0x010f:0x00d4,\n0x0110:0x00d1,\n0x0111:0x00d0,\n0x0118:0x00a8,\n0x0119:0x00a9,\n0x011a:0x00b7,\n0x011b:0x00d8,\n0x0139:0x0091,\n0x013a:0x0092,\n0x013d:0x0095,\n0x013e:0x0096,\n0x0141:0x009d,\n0x0142:0x0088,\n0x0143:0x00e3,\n0x0144:0x00e4,\n0x0147:0x00d5,\n0x0148:0x00e5,\n0x0150:0x008a,\n0x0151:0x008b,\n0x0154:0x00e8,\n0x0155:0x00ea,\n0x0158:0x00fc,\n0x0159:0x00fd,\n0x015a:0x0097,\n0x015b:0x0098,\n0x015e:0x00b8,\n0x015f:0x00ad,\n0x0160:0x00e6,\n0x0161:0x00e7,\n0x0162:0x00dd,\n0x0163:0x00ee,\n0x0164:0x009b,\n0x0165:0x009c,\n0x016e:0x00de,\n0x016f:0x0085,\n0x0170:0x00eb,\n0x0171:0x00fb,\n0x0179:0x008d,\n0x017a:0x00ab,\n0x017b:0x00bd,\n0x017c:0x00be,\n0x017d:0x00a6,\n0x017e:0x00a7,\n0x02c7:0x00f3,\n0x02d8:0x00f4,\n0x02d9:0x00fa,\n0x02db:0x00f2,\n0x02dd:0x00f1,\n0x2500:0x00c4,\n0x2502:0x00b3,\n0x250c:0x00da,\n0x2510:0x00bf,\n0x2514:0x00c0,\n0x2518:0x00d9,\n0x251c:0x00c3,\n0x2524:0x00b4,\n0x252c:0x00c2,\n0x2534:0x00c1,\n0x253c:0x00c5,\n0x2550:0x00cd,\n0x2551:0x00ba,\n0x2554:0x00c9,\n0x2557:0x00bb,\n0x255a:0x00c8,\n0x255d:0x00bc,\n0x2560:0x00cc,\n0x2563:0x00b9,\n0x2566:0x00cb,\n0x2569:0x00ca,\n0x256c:0x00ce,\n0x2580:0x00df,\n0x2584:0x00dc,\n0x2588:0x00db,\n0x2591:0x00b0,\n0x2592:0x00b1,\n0x2593:0x00b2,\n0x25a0:0x00fe,\n}\n"], "asyncio.selector_events": [".py", "''\n\n\n\n\n\n__all__=['BaseSelectorEventLoop']\n\nimport collections\nimport errno\nimport functools\nimport socket\nimport sys\nimport warnings\ntry :\n import ssl\nexcept ImportError:\n ssl=None\n \nfrom .import base_events\nfrom .import constants\nfrom .import events\nfrom .import futures\nfrom .import selectors\nfrom .import transports\nfrom .import sslproto\nfrom .coroutines import coroutine\nfrom .log import logger\n\n\ndef _test_selector_event(selector,fd,event):\n\n\n try :\n key=selector.get_key(fd)\n except KeyError:\n return False\n else :\n return bool(key.events&event)\n \n \nclass BaseSelectorEventLoop(base_events.BaseEventLoop):\n ''\n\n\n \n \n def __init__(self,selector=None ):\n super().__init__()\n \n if selector is None :\n selector=selectors.DefaultSelector()\n logger.debug('Using selector: %s',selector.__class__.__name__)\n self._selector=selector\n self._make_self_pipe()\n \n def _make_socket_transport(self,sock,protocol,waiter=None ,*,\n extra=None ,server=None ):\n return _SelectorSocketTransport(self,sock,protocol,waiter,\n extra,server)\n \n def 
_make_ssl_transport(self,rawsock,protocol,sslcontext,waiter=None ,\n *,server_side=False ,server_hostname=None ,\n extra=None ,server=None ):\n if not sslproto._is_sslproto_available():\n return self._make_legacy_ssl_transport(\n rawsock,protocol,sslcontext,waiter,\n server_side=server_side,server_hostname=server_hostname,\n extra=extra,server=server)\n \n ssl_protocol=sslproto.SSLProtocol(self,protocol,sslcontext,waiter,\n server_side,server_hostname)\n _SelectorSocketTransport(self,rawsock,ssl_protocol,\n extra=extra,server=server)\n return ssl_protocol._app_transport\n \n def _make_legacy_ssl_transport(self,rawsock,protocol,sslcontext,\n waiter,*,\n server_side=False ,server_hostname=None ,\n extra=None ,server=None ):\n \n \n return _SelectorSslTransport(\n self,rawsock,protocol,sslcontext,waiter,\n server_side,server_hostname,extra,server)\n \n def _make_datagram_transport(self,sock,protocol,\n address=None ,waiter=None ,extra=None ):\n return _SelectorDatagramTransport(self,sock,protocol,\n address,waiter,extra)\n \n def close(self):\n if self.is_running():\n raise RuntimeError(\"Cannot close a running event loop\")\n if self.is_closed():\n return\n self._close_self_pipe()\n super().close()\n if self._selector is not None :\n self._selector.close()\n self._selector=None\n \n def _socketpair(self):\n raise NotImplementedError\n \n def _close_self_pipe(self):\n self.remove_reader(self._ssock.fileno())\n self._ssock.close()\n self._ssock=None\n self._csock.close()\n self._csock=None\n self._internal_fds -=1\n \n def _make_self_pipe(self):\n \n self._ssock,self._csock=self._socketpair()\n self._ssock.setblocking(False )\n self._csock.setblocking(False )\n self._internal_fds +=1\n self.add_reader(self._ssock.fileno(),self._read_from_self)\n \n def _process_self_data(self,data):\n pass\n \n def _read_from_self(self):\n while True :\n try :\n data=self._ssock.recv(4096)\n if not data:\n break\n self._process_self_data(data)\n except InterruptedError:\n continue\n except BlockingIOError:\n break\n \n def _write_to_self(self):\n \n \n \n \n \n csock=self._csock\n if csock is not None :\n try :\n csock.send(b'\\0')\n except OSError:\n if self._debug:\n logger.debug(\"Fail to write a null byte into the \"\n \"self-pipe socket\",\n exc_info=True )\n \n def _start_serving(self,protocol_factory,sock,\n sslcontext=None ,server=None ):\n self.add_reader(sock.fileno(),self._accept_connection,\n protocol_factory,sock,sslcontext,server)\n \n def _accept_connection(self,protocol_factory,sock,\n sslcontext=None ,server=None ):\n try :\n conn,addr=sock.accept()\n if self._debug:\n logger.debug(\"%r got a new connection from %r: %r\",\n server,addr,conn)\n conn.setblocking(False )\n except (BlockingIOError,InterruptedError,ConnectionAbortedError):\n pass\n except OSError as exc:\n \n if exc.errno in (errno.EMFILE,errno.ENFILE,\n errno.ENOBUFS,errno.ENOMEM):\n \n \n \n self.call_exception_handler({\n 'message':'socket.accept() out of system resource',\n 'exception':exc,\n 'socket':sock,\n })\n self.remove_reader(sock.fileno())\n self.call_later(constants.ACCEPT_RETRY_DELAY,\n self._start_serving,\n protocol_factory,sock,sslcontext,server)\n else :\n raise\n else :\n extra={'peername':addr}\n accept=self._accept_connection2(protocol_factory,conn,extra,\n sslcontext,server)\n self.create_task(accept)\n \n @coroutine\n def _accept_connection2(self,protocol_factory,conn,extra,\n sslcontext=None ,server=None ):\n protocol=None\n transport=None\n try :\n protocol=protocol_factory()\n 
waiter=futures.Future(loop=self)\n if sslcontext:\n transport=self._make_ssl_transport(\n conn,protocol,sslcontext,waiter=waiter,\n server_side=True ,extra=extra,server=server)\n else :\n transport=self._make_socket_transport(\n conn,protocol,waiter=waiter,extra=extra,\n server=server)\n \n try :\n yield from waiter\n except :\n transport.close()\n raise\n \n \n except Exception as exc:\n if self._debug:\n context={\n 'message':('Error on transport creation '\n 'for incoming connection'),\n 'exception':exc,\n }\n if protocol is not None :\n context['protocol']=protocol\n if transport is not None :\n context['transport']=transport\n self.call_exception_handler(context)\n \n def add_reader(self,fd,callback,*args):\n ''\n self._check_closed()\n handle=events.Handle(callback,args,self)\n try :\n key=self._selector.get_key(fd)\n except KeyError:\n self._selector.register(fd,selectors.EVENT_READ,\n (handle,None ))\n else :\n mask,(reader,writer)=key.events,key.data\n self._selector.modify(fd,mask |selectors.EVENT_READ,\n (handle,writer))\n if reader is not None :\n reader.cancel()\n \n def remove_reader(self,fd):\n ''\n if self.is_closed():\n return False\n try :\n key=self._selector.get_key(fd)\n except KeyError:\n return False\n else :\n mask,(reader,writer)=key.events,key.data\n mask &=~selectors.EVENT_READ\n if not mask:\n self._selector.unregister(fd)\n else :\n self._selector.modify(fd,mask,(None ,writer))\n \n if reader is not None :\n reader.cancel()\n return True\n else :\n return False\n \n def add_writer(self,fd,callback,*args):\n ''\n self._check_closed()\n handle=events.Handle(callback,args,self)\n try :\n key=self._selector.get_key(fd)\n except KeyError:\n self._selector.register(fd,selectors.EVENT_WRITE,\n (None ,handle))\n else :\n mask,(reader,writer)=key.events,key.data\n self._selector.modify(fd,mask |selectors.EVENT_WRITE,\n (reader,handle))\n if writer is not None :\n writer.cancel()\n \n def remove_writer(self,fd):\n ''\n if self.is_closed():\n return False\n try :\n key=self._selector.get_key(fd)\n except KeyError:\n return False\n else :\n mask,(reader,writer)=key.events,key.data\n \n mask &=~selectors.EVENT_WRITE\n if not mask:\n self._selector.unregister(fd)\n else :\n self._selector.modify(fd,mask,(reader,None ))\n \n if writer is not None :\n writer.cancel()\n return True\n else :\n return False\n \n def sock_recv(self,sock,n):\n ''\n\n\n\n\n\n\n \n if self._debug and sock.gettimeout()!=0:\n raise ValueError(\"the socket must be non-blocking\")\n fut=futures.Future(loop=self)\n self._sock_recv(fut,False ,sock,n)\n return fut\n \n def _sock_recv(self,fut,registered,sock,n):\n \n \n fd=sock.fileno()\n if registered:\n \n \n \n \n self.remove_reader(fd)\n if fut.cancelled():\n return\n try :\n data=sock.recv(n)\n except (BlockingIOError,InterruptedError):\n self.add_reader(fd,self._sock_recv,fut,True ,sock,n)\n except Exception as exc:\n fut.set_exception(exc)\n else :\n fut.set_result(data)\n \n def sock_sendall(self,sock,data):\n ''\n\n\n\n\n\n\n\n\n \n if self._debug and sock.gettimeout()!=0:\n raise ValueError(\"the socket must be non-blocking\")\n fut=futures.Future(loop=self)\n if data:\n self._sock_sendall(fut,False ,sock,data)\n else :\n fut.set_result(None )\n return fut\n \n def _sock_sendall(self,fut,registered,sock,data):\n fd=sock.fileno()\n \n if registered:\n self.remove_writer(fd)\n if fut.cancelled():\n return\n \n try :\n n=sock.send(data)\n except (BlockingIOError,InterruptedError):\n n=0\n except Exception as exc:\n fut.set_exception(exc)\n return\n 
\n if n ==len(data):\n fut.set_result(None )\n else :\n if n:\n data=data[n:]\n self.add_writer(fd,self._sock_sendall,fut,True ,sock,data)\n \n def sock_connect(self,sock,address):\n ''\n\n\n\n\n\n\n\n\n \n if self._debug and sock.gettimeout()!=0:\n raise ValueError(\"the socket must be non-blocking\")\n fut=futures.Future(loop=self)\n try :\n if self._debug:\n base_events._check_resolved_address(sock,address)\n except ValueError as err:\n fut.set_exception(err)\n else :\n self._sock_connect(fut,sock,address)\n return fut\n \n def _sock_connect(self,fut,sock,address):\n fd=sock.fileno()\n try :\n while True :\n try :\n sock.connect(address)\n except InterruptedError:\n continue\n else :\n break\n except BlockingIOError:\n fut.add_done_callback(functools.partial(self._sock_connect_done,\n fd))\n self.add_writer(fd,self._sock_connect_cb,fut,sock,address)\n except Exception as exc:\n fut.set_exception(exc)\n else :\n fut.set_result(None )\n \n def _sock_connect_done(self,fd,fut):\n self.remove_writer(fd)\n \n def _sock_connect_cb(self,fut,sock,address):\n if fut.cancelled():\n return\n \n try :\n err=sock.getsockopt(socket.SOL_SOCKET,socket.SO_ERROR)\n if err !=0:\n \n raise OSError(err,'Connect call failed %s'%(address,))\n except (BlockingIOError,InterruptedError):\n \n pass\n except Exception as exc:\n fut.set_exception(exc)\n else :\n fut.set_result(None )\n \n def sock_accept(self,sock):\n ''\n\n\n\n\n\n\n\n \n if self._debug and sock.gettimeout()!=0:\n raise ValueError(\"the socket must be non-blocking\")\n fut=futures.Future(loop=self)\n self._sock_accept(fut,False ,sock)\n return fut\n \n def _sock_accept(self,fut,registered,sock):\n fd=sock.fileno()\n if registered:\n self.remove_reader(fd)\n if fut.cancelled():\n return\n try :\n conn,address=sock.accept()\n conn.setblocking(False )\n except (BlockingIOError,InterruptedError):\n self.add_reader(fd,self._sock_accept,fut,True ,sock)\n except Exception as exc:\n fut.set_exception(exc)\n else :\n fut.set_result((conn,address))\n \n def _process_events(self,event_list):\n for key,mask in event_list:\n fileobj,(reader,writer)=key.fileobj,key.data\n if mask&selectors.EVENT_READ and reader is not None :\n if reader._cancelled:\n self.remove_reader(fileobj)\n else :\n self._add_callback(reader)\n if mask&selectors.EVENT_WRITE and writer is not None :\n if writer._cancelled:\n self.remove_writer(fileobj)\n else :\n self._add_callback(writer)\n \n def _stop_serving(self,sock):\n self.remove_reader(sock.fileno())\n sock.close()\n \n \nclass _SelectorTransport(transports._FlowControlMixin,\ntransports.Transport):\n\n max_size=256 *1024\n \n _buffer_factory=bytearray\n \n \n \n \n _sock=None\n \n def __init__(self,loop,sock,protocol,extra=None ,server=None ):\n super().__init__(extra,loop)\n self._extra['socket']=sock\n self._extra['sockname']=sock.getsockname()\n if'peername'not in self._extra:\n try :\n self._extra['peername']=sock.getpeername()\n except socket.error:\n self._extra['peername']=None\n self._sock=sock\n self._sock_fd=sock.fileno()\n self._protocol=protocol\n self._protocol_connected=True\n self._server=server\n self._buffer=self._buffer_factory()\n self._conn_lost=0\n self._closing=False\n if self._server is not None :\n self._server._attach()\n \n def __repr__(self):\n info=[self.__class__.__name__]\n if self._sock is None :\n info.append('closed')\n elif self._closing:\n info.append('closing')\n info.append('fd=%s'%self._sock_fd)\n \n if self._loop is not None :\n polling=_test_selector_event(self._loop._selector,\n 
self._sock_fd,selectors.EVENT_READ)\n if polling:\n info.append('read=polling')\n else :\n info.append('read=idle')\n \n polling=_test_selector_event(self._loop._selector,\n self._sock_fd,\n selectors.EVENT_WRITE)\n if polling:\n state='polling'\n else :\n state='idle'\n \n bufsize=self.get_write_buffer_size()\n info.append('write=<%s, bufsize=%s>'%(state,bufsize))\n return'<%s>'%' '.join(info)\n \n def abort(self):\n self._force_close(None )\n \n def close(self):\n if self._closing:\n return\n self._closing=True\n self._loop.remove_reader(self._sock_fd)\n if not self._buffer:\n self._conn_lost +=1\n self._loop.call_soon(self._call_connection_lost,None )\n \n \n \n \n if sys.version_info >=(3,4):\n def __del__(self):\n if self._sock is not None :\n warnings.warn(\"unclosed transport %r\"%self,ResourceWarning)\n self._sock.close()\n \n def _fatal_error(self,exc,message='Fatal error on transport'):\n \n if isinstance(exc,(BrokenPipeError,\n ConnectionResetError,ConnectionAbortedError)):\n if self._loop.get_debug():\n logger.debug(\"%r: %s\",self,message,exc_info=True )\n else :\n self._loop.call_exception_handler({\n 'message':message,\n 'exception':exc,\n 'transport':self,\n 'protocol':self._protocol,\n })\n self._force_close(exc)\n \n def _force_close(self,exc):\n if self._conn_lost:\n return\n if self._buffer:\n self._buffer.clear()\n self._loop.remove_writer(self._sock_fd)\n if not self._closing:\n self._closing=True\n self._loop.remove_reader(self._sock_fd)\n self._conn_lost +=1\n self._loop.call_soon(self._call_connection_lost,exc)\n \n def _call_connection_lost(self,exc):\n try :\n if self._protocol_connected:\n self._protocol.connection_lost(exc)\n finally :\n self._sock.close()\n self._sock=None\n self._protocol=None\n self._loop=None\n server=self._server\n if server is not None :\n server._detach()\n self._server=None\n \n def get_write_buffer_size(self):\n return len(self._buffer)\n \n \nclass _SelectorSocketTransport(_SelectorTransport):\n\n def __init__(self,loop,sock,protocol,waiter=None ,\n extra=None ,server=None ):\n super().__init__(loop,sock,protocol,extra,server)\n self._eof=False\n self._paused=False\n \n self._loop.call_soon(self._protocol.connection_made,self)\n \n self._loop.call_soon(self._loop.add_reader,\n self._sock_fd,self._read_ready)\n if waiter is not None :\n \n self._loop.call_soon(waiter._set_result_unless_cancelled,None )\n \n def pause_reading(self):\n if self._closing:\n raise RuntimeError('Cannot pause_reading() when closing')\n if self._paused:\n raise RuntimeError('Already paused')\n self._paused=True\n self._loop.remove_reader(self._sock_fd)\n if self._loop.get_debug():\n logger.debug(\"%r pauses reading\",self)\n \n def resume_reading(self):\n if not self._paused:\n raise RuntimeError('Not paused')\n self._paused=False\n if self._closing:\n return\n self._loop.add_reader(self._sock_fd,self._read_ready)\n if self._loop.get_debug():\n logger.debug(\"%r resumes reading\",self)\n \n def _read_ready(self):\n try :\n data=self._sock.recv(self.max_size)\n except (BlockingIOError,InterruptedError):\n pass\n except Exception as exc:\n self._fatal_error(exc,'Fatal read error on socket transport')\n else :\n if data:\n self._protocol.data_received(data)\n else :\n if self._loop.get_debug():\n logger.debug(\"%r received EOF\",self)\n keep_open=self._protocol.eof_received()\n if keep_open:\n \n \n \n self._loop.remove_reader(self._sock_fd)\n else :\n self.close()\n \n def write(self,data):\n if not isinstance(data,(bytes,bytearray,memoryview)):\n raise 
TypeError('data argument must be byte-ish (%r)',\n type(data))\n if self._eof:\n raise RuntimeError('Cannot call write() after write_eof()')\n if not data:\n return\n \n if self._conn_lost:\n if self._conn_lost >=constants.LOG_THRESHOLD_FOR_CONNLOST_WRITES:\n logger.warning('socket.send() raised exception.')\n self._conn_lost +=1\n return\n \n if not self._buffer:\n \n try :\n n=self._sock.send(data)\n except (BlockingIOError,InterruptedError):\n pass\n except Exception as exc:\n self._fatal_error(exc,'Fatal write error on socket transport')\n return\n else :\n data=data[n:]\n if not data:\n return\n \n self._loop.add_writer(self._sock_fd,self._write_ready)\n \n \n self._buffer.extend(data)\n self._maybe_pause_protocol()\n \n def _write_ready(self):\n assert self._buffer,'Data should not be empty'\n \n try :\n n=self._sock.send(self._buffer)\n except (BlockingIOError,InterruptedError):\n pass\n except Exception as exc:\n self._loop.remove_writer(self._sock_fd)\n self._buffer.clear()\n self._fatal_error(exc,'Fatal write error on socket transport')\n else :\n if n:\n del self._buffer[:n]\n self._maybe_resume_protocol()\n if not self._buffer:\n self._loop.remove_writer(self._sock_fd)\n if self._closing:\n self._call_connection_lost(None )\n elif self._eof:\n self._sock.shutdown(socket.SHUT_WR)\n \n def write_eof(self):\n if self._eof:\n return\n self._eof=True\n if not self._buffer:\n self._sock.shutdown(socket.SHUT_WR)\n \n def can_write_eof(self):\n return True\n \n \nclass _SelectorSslTransport(_SelectorTransport):\n\n _buffer_factory=bytearray\n \n def __init__(self,loop,rawsock,protocol,sslcontext,waiter=None ,\n server_side=False ,server_hostname=None ,\n extra=None ,server=None ):\n if ssl is None :\n raise RuntimeError('stdlib ssl module not available')\n \n if not sslcontext:\n sslcontext=sslproto._create_transport_context(server_side,server_hostname)\n \n wrap_kwargs={\n 'server_side':server_side,\n 'do_handshake_on_connect':False ,\n }\n if server_hostname and not server_side:\n wrap_kwargs['server_hostname']=server_hostname\n sslsock=sslcontext.wrap_socket(rawsock,**wrap_kwargs)\n \n super().__init__(loop,sslsock,protocol,extra,server)\n \n self._protocol_connected=False\n \n self._server_hostname=server_hostname\n self._waiter=waiter\n self._sslcontext=sslcontext\n self._paused=False\n \n \n self._extra.update(sslcontext=sslcontext)\n \n if self._loop.get_debug():\n logger.debug(\"%r starts SSL handshake\",self)\n start_time=self._loop.time()\n else :\n start_time=None\n self._on_handshake(start_time)\n \n def _wakeup_waiter(self,exc=None ):\n if self._waiter is None :\n return\n if not self._waiter.cancelled():\n if exc is not None :\n self._waiter.set_exception(exc)\n else :\n self._waiter.set_result(None )\n self._waiter=None\n \n def _on_handshake(self,start_time):\n try :\n self._sock.do_handshake()\n except ssl.SSLWantReadError:\n self._loop.add_reader(self._sock_fd,\n self._on_handshake,start_time)\n return\n except ssl.SSLWantWriteError:\n self._loop.add_writer(self._sock_fd,\n self._on_handshake,start_time)\n return\n except BaseException as exc:\n if self._loop.get_debug():\n logger.warning(\"%r: SSL handshake failed\",\n self,exc_info=True )\n self._loop.remove_reader(self._sock_fd)\n self._loop.remove_writer(self._sock_fd)\n self._sock.close()\n self._wakeup_waiter(exc)\n if isinstance(exc,Exception):\n return\n else :\n raise\n \n self._loop.remove_reader(self._sock_fd)\n self._loop.remove_writer(self._sock_fd)\n \n peercert=self._sock.getpeercert()\n if not 
hasattr(self._sslcontext,'check_hostname'):\n \n \n if (self._server_hostname and\n self._sslcontext.verify_mode !=ssl.CERT_NONE):\n try :\n ssl.match_hostname(peercert,self._server_hostname)\n except Exception as exc:\n if self._loop.get_debug():\n logger.warning(\"%r: SSL handshake failed \"\n \"on matching the hostname\",\n self,exc_info=True )\n self._sock.close()\n self._wakeup_waiter(exc)\n return\n \n \n self._extra.update(peercert=peercert,\n cipher=self._sock.cipher(),\n compression=self._sock.compression(),\n )\n \n self._read_wants_write=False\n self._write_wants_read=False\n self._loop.add_reader(self._sock_fd,self._read_ready)\n self._protocol_connected=True\n self._loop.call_soon(self._protocol.connection_made,self)\n \n self._loop.call_soon(self._wakeup_waiter)\n \n if self._loop.get_debug():\n dt=self._loop.time()-start_time\n logger.debug(\"%r: SSL handshake took %.1f ms\",self,dt *1e3)\n \n def pause_reading(self):\n \n \n \n \n \n \n if self._closing:\n raise RuntimeError('Cannot pause_reading() when closing')\n if self._paused:\n raise RuntimeError('Already paused')\n self._paused=True\n self._loop.remove_reader(self._sock_fd)\n if self._loop.get_debug():\n logger.debug(\"%r pauses reading\",self)\n \n def resume_reading(self):\n if not self._paused:\n raise RuntimeError('Not paused')\n self._paused=False\n if self._closing:\n return\n self._loop.add_reader(self._sock_fd,self._read_ready)\n if self._loop.get_debug():\n logger.debug(\"%r resumes reading\",self)\n \n def _read_ready(self):\n if self._write_wants_read:\n self._write_wants_read=False\n self._write_ready()\n \n if self._buffer:\n self._loop.add_writer(self._sock_fd,self._write_ready)\n \n try :\n data=self._sock.recv(self.max_size)\n except (BlockingIOError,InterruptedError,ssl.SSLWantReadError):\n pass\n except ssl.SSLWantWriteError:\n self._read_wants_write=True\n self._loop.remove_reader(self._sock_fd)\n self._loop.add_writer(self._sock_fd,self._write_ready)\n except Exception as exc:\n self._fatal_error(exc,'Fatal read error on SSL transport')\n else :\n if data:\n self._protocol.data_received(data)\n else :\n try :\n if self._loop.get_debug():\n logger.debug(\"%r received EOF\",self)\n keep_open=self._protocol.eof_received()\n if keep_open:\n logger.warning('returning true from eof_received() '\n 'has no effect when using ssl')\n finally :\n self.close()\n \n def _write_ready(self):\n if self._read_wants_write:\n self._read_wants_write=False\n self._read_ready()\n \n if not (self._paused or self._closing):\n self._loop.add_reader(self._sock_fd,self._read_ready)\n \n if self._buffer:\n try :\n n=self._sock.send(self._buffer)\n except (BlockingIOError,InterruptedError,ssl.SSLWantWriteError):\n n=0\n except ssl.SSLWantReadError:\n n=0\n self._loop.remove_writer(self._sock_fd)\n self._write_wants_read=True\n except Exception as exc:\n self._loop.remove_writer(self._sock_fd)\n self._buffer.clear()\n self._fatal_error(exc,'Fatal write error on SSL transport')\n return\n \n if n:\n del self._buffer[:n]\n \n self._maybe_resume_protocol()\n \n if not self._buffer:\n self._loop.remove_writer(self._sock_fd)\n if self._closing:\n self._call_connection_lost(None )\n \n def write(self,data):\n if not isinstance(data,(bytes,bytearray,memoryview)):\n raise TypeError('data argument must be byte-ish (%r)',\n type(data))\n if not data:\n return\n \n if self._conn_lost:\n if self._conn_lost >=constants.LOG_THRESHOLD_FOR_CONNLOST_WRITES:\n logger.warning('socket.send() raised exception.')\n self._conn_lost +=1\n return\n \n 
if not self._buffer:\n self._loop.add_writer(self._sock_fd,self._write_ready)\n \n \n self._buffer.extend(data)\n self._maybe_pause_protocol()\n \n def can_write_eof(self):\n return False\n \n \nclass _SelectorDatagramTransport(_SelectorTransport):\n\n _buffer_factory=collections.deque\n \n def __init__(self,loop,sock,protocol,address=None ,\n waiter=None ,extra=None ):\n super().__init__(loop,sock,protocol,extra)\n self._address=address\n self._loop.call_soon(self._protocol.connection_made,self)\n \n self._loop.call_soon(self._loop.add_reader,\n self._sock_fd,self._read_ready)\n if waiter is not None :\n \n self._loop.call_soon(waiter._set_result_unless_cancelled,None )\n \n def get_write_buffer_size(self):\n return sum(len(data)for data,_ in self._buffer)\n \n def _read_ready(self):\n try :\n data,addr=self._sock.recvfrom(self.max_size)\n except (BlockingIOError,InterruptedError):\n pass\n except OSError as exc:\n self._protocol.error_received(exc)\n except Exception as exc:\n self._fatal_error(exc,'Fatal read error on datagram transport')\n else :\n self._protocol.datagram_received(data,addr)\n \n def sendto(self,data,addr=None ):\n if not isinstance(data,(bytes,bytearray,memoryview)):\n raise TypeError('data argument must be byte-ish (%r)',\n type(data))\n if not data:\n return\n \n if self._address and addr not in (None ,self._address):\n raise ValueError('Invalid address: must be None or %s'%\n (self._address,))\n \n if self._conn_lost and self._address:\n if self._conn_lost >=constants.LOG_THRESHOLD_FOR_CONNLOST_WRITES:\n logger.warning('socket.send() raised exception.')\n self._conn_lost +=1\n return\n \n if not self._buffer:\n \n try :\n if self._address:\n self._sock.send(data)\n else :\n self._sock.sendto(data,addr)\n return\n except (BlockingIOError,InterruptedError):\n self._loop.add_writer(self._sock_fd,self._sendto_ready)\n except OSError as exc:\n self._protocol.error_received(exc)\n return\n except Exception as exc:\n self._fatal_error(exc,\n 'Fatal write error on datagram transport')\n return\n \n \n self._buffer.append((bytes(data),addr))\n self._maybe_pause_protocol()\n \n def _sendto_ready(self):\n while self._buffer:\n data,addr=self._buffer.popleft()\n try :\n if self._address:\n self._sock.send(data)\n else :\n self._sock.sendto(data,addr)\n except (BlockingIOError,InterruptedError):\n self._buffer.appendleft((data,addr))\n break\n except OSError as exc:\n self._protocol.error_received(exc)\n return\n except Exception as exc:\n self._fatal_error(exc,\n 'Fatal write error on datagram transport')\n return\n \n self._maybe_resume_protocol()\n if not self._buffer:\n self._loop.remove_writer(self._sock_fd)\n if self._closing:\n self._call_connection_lost(None )\n"], "asyncio.coroutines": [".py", "__all__=['coroutine',\n'iscoroutinefunction','iscoroutine']\n\nimport functools\nimport inspect\nimport opcode\nimport os\nimport sys\nimport traceback\nimport types\n\nfrom .import events\nfrom .import futures\nfrom .log import logger\n\n\n\n_YIELD_FROM=opcode.opmap['YIELD_FROM']\n\n\n\n\n\n\n\n\n\n\n_DEBUG=(not sys.flags.ignore_environment\nand bool(os.environ.get('PYTHONASYNCIODEBUG')))\n\n\n\ndef has_yield_from_bug():\n class MyGen:\n def __init__(self):\n self.send_args=None\n def __iter__(self):\n return self\n def __next__(self):\n return 42\n def send(self,*what):\n self.send_args=what\n return None\n def yield_from_gen(gen):\n yield from gen\n value=(1,2,3)\n gen=MyGen()\n coro=yield_from_gen(gen)\n next(coro)\n coro.send(value)\n return gen.send_args 
!=(value,)\n_YIELD_FROM_BUG=has_yield_from_bug()\ndel has_yield_from_bug\n\n\nclass CoroWrapper:\n\n\n def __init__(self,gen,func):\n assert inspect.isgenerator(gen),gen\n self.gen=gen\n self.func=func\n self._source_traceback=traceback.extract_stack(sys._getframe(1))\n \n \n \n def __repr__(self):\n coro_repr=_format_coroutine(self)\n if self._source_traceback:\n frame=self._source_traceback[-1]\n coro_repr +=', created at %s:%s'%(frame[0],frame[1])\n return'<%s %s>'%(self.__class__.__name__,coro_repr)\n \n def __iter__(self):\n return self\n \n def __next__(self):\n return next(self.gen)\n \n if _YIELD_FROM_BUG:\n \n \n \n \n def send(self,*value):\n frame=sys._getframe()\n caller=frame.f_back\n assert caller.f_lasti >=0\n if caller.f_code.co_code[caller.f_lasti]!=_YIELD_FROM:\n value=value[0]\n return self.gen.send(value)\n else :\n def send(self,value):\n return self.gen.send(value)\n \n def throw(self,exc):\n return self.gen.throw(exc)\n \n def close(self):\n return self.gen.close()\n \n @property\n def gi_frame(self):\n return None\n \n \n @property\n def gi_running(self):\n return self.gen.gi_running\n \n @property\n def gi_code(self):\n return self.gen.__code__\n \n def __del__(self):\n \n gen=getattr(self,'gen',None )\n frame=getattr(gen,'gi_frame',None )\n if frame is not None and frame.f_lasti ==-1:\n msg='%r was never yielded from'%self\n tb=getattr(self,'_source_traceback',())\n if tb:\n tb=''.join(traceback.format_list(tb))\n msg +=('\\nCoroutine object created at '\n '(most recent call last):\\n')\n msg +=tb.rstrip()\n logger.error(msg)\n \n \ndef coroutine(func):\n ''\n\n\n\n \n if inspect.isgeneratorfunction(func):\n coro=func\n else :\n @functools.wraps(func)\n def coro(*args,**kw):\n res=func(*args,**kw)\n if isinstance(res,futures.Future)or inspect.isgenerator(res):\n res=yield from res\n res.gi_frame=None\n return res\n \n if not _DEBUG:\n wrapper=coro\n else :\n @functools.wraps(func)\n def wrapper(*args,**kwds):\n w=CoroWrapper(coro(*args,**kwds),func)\n if w._source_traceback:\n del w._source_traceback[-1]\n w.__name__=func.__name__\n if hasattr(func,'__qualname__'):\n w.__qualname__=func.__qualname__\n w.__doc__=func.__doc__\n return w\n \n wrapper.gi_frame=None\n wrapper._is_coroutine=True\n return wrapper\n \n \ndef iscoroutinefunction(func):\n ''\n return getattr(func,'_is_coroutine',False )\n \n \n_COROUTINE_TYPES=(types.GeneratorType,CoroWrapper)\n\ndef iscoroutine(obj):\n ''\n return isinstance(obj,_COROUTINE_TYPES)\n \n \ndef _format_coroutine(coro):\n try :\n assert iscoroutine(coro)\n coro_name=getattr(coro,'__qualname__',coro.__name__)\n \n filename=coro.__code__.co_filename\n if (isinstance(coro,CoroWrapper)\n and not inspect.isgeneratorfunction(coro.func)):\n filename,lineno=events._get_function_source(coro.func)\n if coro.gi_frame is None :\n coro_repr=('%s() done, defined at %s:%s'\n %(coro_name,filename,lineno))\n else :\n coro_repr=('%s() running, defined at %s:%s'\n %(coro_name,filename,lineno))\n elif coro.gi_frame is not None :\n lineno=coro.gi_frame.f_lineno\n coro_repr=('%s() running at %s:%s'\n %(coro_name,filename,lineno))\n else :\n lineno=coro.__code__.co_firstlineno\n coro_repr=('%s() done, defined at %s:%s'\n %(coro_name,filename,lineno))\n except :\n coro_repr=\"Coroutine: %s\"%coro_name\n return coro_repr\n"], "encodings.koi8_r": [".py", "''\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_table)\n \n def 
decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False ):\n return codecs.charmap_encode(input,self.errors,encoding_table)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False ):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='koi8-r',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n \n \ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' '\n'!'\n'\"'\n'#'\n'$'\n'%'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\u2500'\n'\\u2502'\n'\\u250c'\n'\\u2510'\n'\\u2514'\n'\\u2518'\n'\\u251c'\n'\\u2524'\n'\\u252c'\n'\\u2534'\n'\\u253c'\n'\\u2580'\n'\\u2584'\n'\\u2588'\n'\\u258c'\n'\\u2590'\n'\\u2591'\n'\\u2592'\n'\\u2593'\n'\\u2320'\n'\\u25a0'\n'\\u2219'\n'\\u221a'\n'\\u2248'\n'\\u2264'\n'\\u2265'\n'\\xa0'\n'\\u2321'\n'\\xb0'\n'\\xb2'\n'\\xb7'\n'\\xf7'\n'\\u2550'\n'\\u2551'\n'\\u2552'\n'\\u0451'\n'\\u2553'\n'\\u2554'\n'\\u2555'\n'\\u2556'\n'\\u2557'\n'\\u2558'\n'\\u2559'\n'\\u255a'\n'\\u255b'\n'\\u255c'\n'\\u255d'\n'\\u255e'\n'\\u255f'\n'\\u2560'\n'\\u2561'\n'\\u0401'\n'\\u2562'\n'\\u2563'\n'\\u2564'\n'\\u2565'\n'\\u2566'\n'\\u2567'\n'\\u2568'\n'\\u2569'\n'\\u256a'\n'\\u256b'\n'\\u256c'\n'\\xa9'\n'\\u044e'\n'\\u0430'\n'\\u0431'\n'\\u0446'\n'\\u0434'\n'\\u0435'\n'\\u0444'\n'\\u0433'\n'\\u0445'\n'\\u0438'\n'\\u0439'\n'\\u043a'\n'\\u043b'\n'\\u043c'\n'\\u043d'\n'\\u043e'\n'\\u043f'\n'\\u044f'\n'\\u0440'\n'\\u0441'\n'\\u0442'\n'\\u0443'\n'\\u0436'\n'\\u0432'\n'\\u044c'\n'\\u044b'\n'\\u0437'\n'\\u0448'\n'\\u044d'\n'\\u0449'\n'\\u0447'\n'\\u044a'\n'\\u042e'\n'\\u0410'\n'\\u0411'\n'\\u0426'\n'\\u0414'\n'\\u0415'\n'\\u0424'\n'\\u0413'\n'\\u0425'\n'\\u0418'\n'\\u0419'\n'\\u041a'\n'\\u041b'\n'\\u041c'\n'\\u041d'\n'\\u041e'\n'\\u041f'\n'\\u042f'\n'\\u0420'\n'\\u0421'\n'\\u0422'\n'\\u0423'\n'\\u0416'\n'\\u0412'\n'\\u042c'\n'\\u042b'\n'\\u0417'\n'\\u0428'\n'\\u042d'\n'\\u0429'\n'\\u0427'\n'\\u042a'\n)\n\n\nencoding_table=codecs.charmap_build(decoding_table)\n"], "encodings.cp874": [".py", "''\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_table)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False ):\n return codecs.charmap_encode(input,self.errors,encoding_table)[0]\n \nclass 
IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False ):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='cp874',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n \n \ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' '\n'!'\n'\"'\n'#'\n'$'\n'%'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\u20ac'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\u2026'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\u2018'\n'\\u2019'\n'\\u201c'\n'\\u201d'\n'\\u2022'\n'\\u2013'\n'\\u2014'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\xa0'\n'\\u0e01'\n'\\u0e02'\n'\\u0e03'\n'\\u0e04'\n'\\u0e05'\n'\\u0e06'\n'\\u0e07'\n'\\u0e08'\n'\\u0e09'\n'\\u0e0a'\n'\\u0e0b'\n'\\u0e0c'\n'\\u0e0d'\n'\\u0e0e'\n'\\u0e0f'\n'\\u0e10'\n'\\u0e11'\n'\\u0e12'\n'\\u0e13'\n'\\u0e14'\n'\\u0e15'\n'\\u0e16'\n'\\u0e17'\n'\\u0e18'\n'\\u0e19'\n'\\u0e1a'\n'\\u0e1b'\n'\\u0e1c'\n'\\u0e1d'\n'\\u0e1e'\n'\\u0e1f'\n'\\u0e20'\n'\\u0e21'\n'\\u0e22'\n'\\u0e23'\n'\\u0e24'\n'\\u0e25'\n'\\u0e26'\n'\\u0e27'\n'\\u0e28'\n'\\u0e29'\n'\\u0e2a'\n'\\u0e2b'\n'\\u0e2c'\n'\\u0e2d'\n'\\u0e2e'\n'\\u0e2f'\n'\\u0e30'\n'\\u0e31'\n'\\u0e32'\n'\\u0e33'\n'\\u0e34'\n'\\u0e35'\n'\\u0e36'\n'\\u0e37'\n'\\u0e38'\n'\\u0e39'\n'\\u0e3a'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\u0e3f'\n'\\u0e40'\n'\\u0e41'\n'\\u0e42'\n'\\u0e43'\n'\\u0e44'\n'\\u0e45'\n'\\u0e46'\n'\\u0e47'\n'\\u0e48'\n'\\u0e49'\n'\\u0e4a'\n'\\u0e4b'\n'\\u0e4c'\n'\\u0e4d'\n'\\u0e4e'\n'\\u0e4f'\n'\\u0e50'\n'\\u0e51'\n'\\u0e52'\n'\\u0e53'\n'\\u0e54'\n'\\u0e55'\n'\\u0e56'\n'\\u0e57'\n'\\u0e58'\n'\\u0e59'\n'\\u0e5a'\n'\\u0e5b'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n)\n\n\nencoding_table=codecs.charmap_build(decoding_table)\n"], "importlib._bootstrap": [".py", "''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n_CASE_INSENSITIVE_PLATFORMS='win','cygwin','darwin'\n\n\ndef _make_relax_case():\n if sys.platform.startswith(_CASE_INSENSITIVE_PLATFORMS):\n def _relax_case():\n ''\n return b'PYTHONCASEOK'in _os.environ\n else :\n def _relax_case():\n ''\n return False\n return _relax_case\n \n \n \ndef _w_long(x):\n ''\n\n\n\n \n x=int(x)\n int_bytes=[]\n int_bytes.append(x&0xFF)\n int_bytes.append((x >>8)&0xFF)\n int_bytes.append((x >>16)&0xFF)\n int_bytes.append((x >>24)&0xFF)\n return bytearray(int_bytes)\n \n \n \ndef _r_long(int_bytes):\n ''\n\n\n\n \n x=int_bytes[0]\n x |=int_bytes[1]<<8\n x |=int_bytes[2]<<16\n x |=int_bytes[3]<<24\n return x\n \n \ndef 
_path_join(*path_parts):\n ''\n new_parts=[]\n for part in path_parts:\n if not part:\n continue\n new_parts.append(part)\n if part[-1]not in path_separators:\n new_parts.append(path_sep)\n return''.join(new_parts[:-1])\n \n \ndef _path_split(path):\n ''\n for x in reversed(path):\n if x in path_separators:\n sep=x\n break\n else :\n sep=path_sep\n front,_,tail=path.rpartition(sep)\n return front,tail\n \n \ndef _path_is_mode_type(path,mode):\n ''\n try :\n stat_info=_os.stat(path)\n except OSError:\n return False\n return (stat_info.st_mode&0o170000)==mode\n \n \n \ndef _path_isfile(path):\n ''\n return _path_is_mode_type(path,0o100000)\n \n \n \ndef _path_isdir(path):\n ''\n if not path:\n path=_os.getcwd()\n return _path_is_mode_type(path,0o040000)\n \n \ndef _write_atomic(path,data,mode=0o666):\n ''\n\n \n \n path_tmp='{}.{}'.format(path,id(path))\n fd=_os.open(path_tmp,\n _os.O_EXCL |_os.O_CREAT |_os.O_WRONLY,mode&0o666)\n try :\n \n \n with _io.FileIO(fd,'wb')as file:\n file.write(data)\n _os.replace(path_tmp,path)\n except OSError:\n try :\n _os.unlink(path_tmp)\n except OSError:\n pass\n raise\n \n \ndef _wrap(new,old):\n ''\n for replace in ['__module__','__name__','__qualname__','__doc__']:\n if hasattr(old,replace):\n setattr(new,replace,getattr(old,replace))\n new.__dict__.update(old.__dict__)\n \n \n_code_type=type(_wrap.__code__)\n\n\ndef new_module(name):\n ''\n\n\n\n \n return type(_io)(name)\n \n \n \n \n \n_module_locks={}\n\n_blocking_on={}\n\n\nclass _DeadlockError(RuntimeError):\n pass\n \n \nclass _ModuleLock:\n ''\n\n\n \n \n def __init__(self,name):\n self.lock=_thread.allocate_lock()\n self.wakeup=_thread.allocate_lock()\n self.name=name\n self.owner=None\n self.count=0\n self.waiters=0\n \n def has_deadlock(self):\n \n me=_thread.get_ident()\n tid=self.owner\n while True :\n lock=_blocking_on.get(tid)\n if lock is None :\n return False\n tid=lock.owner\n if tid ==me:\n return True\n \n def acquire(self):\n ''\n\n\n\n \n tid=_thread.get_ident()\n _blocking_on[tid]=self\n try :\n while True :\n with self.lock:\n if self.count ==0 or self.owner ==tid:\n self.owner=tid\n self.count +=1\n return True\n if self.has_deadlock():\n raise _DeadlockError(\"deadlock detected by %r\"%self)\n if self.wakeup.acquire(False ):\n self.waiters +=1\n \n self.wakeup.acquire()\n self.wakeup.release()\n finally :\n del _blocking_on[tid]\n \n def release(self):\n tid=_thread.get_ident()\n with self.lock:\n if self.owner !=tid:\n raise RuntimeError(\"cannot release un-acquired lock\")\n assert self.count >0\n self.count -=1\n if self.count ==0:\n self.owner=None\n if self.waiters:\n self.waiters -=1\n self.wakeup.release()\n \n def __repr__(self):\n return\"_ModuleLock(%r) at %d\"%(self.name,id(self))\n \n \nclass _DummyModuleLock:\n ''\n \n \n def __init__(self,name):\n self.name=name\n self.count=0\n \n def acquire(self):\n self.count +=1\n return True\n \n def release(self):\n if self.count ==0:\n raise RuntimeError(\"cannot release un-acquired lock\")\n self.count -=1\n \n def __repr__(self):\n return\"_DummyModuleLock(%r) at %d\"%(self.name,id(self))\n \n \n \n \ndef _get_module_lock(name):\n ''\n\n \n lock=None\n try :\n lock=_module_locks[name]()\n except KeyError:\n pass\n if lock is None :\n if _thread is None :\n lock=_DummyModuleLock(name)\n else :\n lock=_ModuleLock(name)\n def cb(_):\n del _module_locks[name]\n _module_locks[name]=_weakref.ref(lock,cb)\n return lock\n \ndef _lock_unlock_module(name):\n ''\n\n\n\n\n \n lock=_get_module_lock(name)\n _imp.release_lock()\n try 
:\n lock.acquire()\n except _DeadlockError:\n \n \n pass\n else :\n lock.release()\n \n \n \ndef _call_with_frames_removed(f,*args,**kwds):\n ''\n\n\n\n\n\n \n return f(*args,**kwds)\n \n \n \n \n\"\"\"Magic word to reject .pyc files generated by other Python versions.\nIt should change for each incompatible change to the bytecode.\n\nThe value of CR and LF is incorporated so if you ever read or write\na .pyc file in text mode the magic number will be wrong; also, the\nApple MPW compiler swaps their values, botching string constants.\n\nThe magic numbers must be spaced apart at least 2 values, as the\n-U interpeter flag will cause MAGIC+1 being used. They have been\nodd numbers for some time now.\n\nThere were a variety of old schemes for setting the magic number.\nThe current working scheme is to increment the previous value by\n10.\n\nStarting with the adoption of PEP 3147 in Python 3.2, every bump in magic\nnumber also includes a new \"magic tag\", i.e. a human readable string used\nto represent the magic number in __pycache__ directories. When you change\nthe magic number, you must also set a new unique magic tag. Generally this\ncan be named after the Python major version of the magic number bump, but\nit can really be anything, as long as it's different than anything else\nthat's come before. The tags are included in the following table, starting\nwith Python 3.2a0.\n\nKnown values:\n Python 1.5: 20121\n Python 1.5.1: 20121\n Python 1.5.2: 20121\n Python 1.6: 50428\n Python 2.0: 50823\n Python 2.0.1: 50823\n Python 2.1: 60202\n Python 2.1.1: 60202\n Python 2.1.2: 60202\n Python 2.2: 60717\n Python 2.3a0: 62011\n Python 2.3a0: 62021\n Python 2.3a0: 62011 (!)\n Python 2.4a0: 62041\n Python 2.4a3: 62051\n Python 2.4b1: 62061\n Python 2.5a0: 62071\n Python 2.5a0: 62081 (ast-branch)\n Python 2.5a0: 62091 (with)\n Python 2.5a0: 62092 (changed WITH_CLEANUP opcode)\n Python 2.5b3: 62101 (fix wrong code: for x, in ...)\n Python 2.5b3: 62111 (fix wrong code: x += yield)\n Python 2.5c1: 62121 (fix wrong lnotab with for loops and\n storing constants that should have been removed)\n Python 2.5c2: 62131 (fix wrong code: for x, in ... 
in listcomp/genexp)\n Python 2.6a0: 62151 (peephole optimizations and STORE_MAP opcode)\n Python 2.6a1: 62161 (WITH_CLEANUP optimization)\n Python 3000: 3000\n 3010 (removed UNARY_CONVERT)\n 3020 (added BUILD_SET)\n 3030 (added keyword-only parameters)\n 3040 (added signature annotations)\n 3050 (print becomes a function)\n 3060 (PEP 3115 metaclass syntax)\n 3061 (string literals become unicode)\n 3071 (PEP 3109 raise changes)\n 3081 (PEP 3137 make __file__ and __name__ unicode)\n 3091 (kill str8 interning)\n 3101 (merge from 2.6a0, see 62151)\n 3103 (__file__ points to source file)\n Python 3.0a4: 3111 (WITH_CLEANUP optimization).\n Python 3.0a5: 3131 (lexical exception stacking, including POP_EXCEPT)\n Python 3.1a0: 3141 (optimize list, set and dict comprehensions:\n change LIST_APPEND and SET_ADD, add MAP_ADD)\n Python 3.1a0: 3151 (optimize conditional branches:\n introduce POP_JUMP_IF_FALSE and POP_JUMP_IF_TRUE)\n Python 3.2a0: 3160 (add SETUP_WITH)\n tag: cpython-32\n Python 3.2a1: 3170 (add DUP_TOP_TWO, remove DUP_TOPX and ROT_FOUR)\n tag: cpython-32\n Python 3.2a2 3180 (add DELETE_DEREF)\n Python 3.3a0 3190 __class__ super closure changed\n Python 3.3a0 3200 (__qualname__ added)\n 3210 (added size modulo 2**32 to the pyc header)\n Python 3.3a1 3220 (changed PEP 380 implementation)\n Python 3.3a4 3230 (revert changes to implicit __class__ closure)\n\nMAGIC must change whenever the bytecode emitted by the compiler may no\nlonger be understood by older implementations of the eval loop (usually\ndue to the addition of new opcodes).\n\n\"\"\"\n_RAW_MAGIC_NUMBER=3230 |ord('\\r')<<16 |ord('\\n')<<24\n_MAGIC_BYTES=bytes(_RAW_MAGIC_NUMBER >>n&0xff for n in range(0,25,8))\n\n_PYCACHE='__pycache__'\n\nSOURCE_SUFFIXES=['.py']\n\nDEBUG_BYTECODE_SUFFIXES=['.pyc']\nOPTIMIZED_BYTECODE_SUFFIXES=['.pyo']\n\ndef cache_from_source(path,debug_override=None ):\n ''\n\n\n\n\n\n\n\n\n\n\n \n debug=not sys.flags.optimize if debug_override is None else debug_override\n if debug:\n suffixes=DEBUG_BYTECODE_SUFFIXES\n else :\n suffixes=OPTIMIZED_BYTECODE_SUFFIXES\n head,tail=_path_split(path)\n base_filename,sep,_=tail.partition('.')\n tag=sys.implementation.cache_tag\n if tag is None :\n raise NotImplementedError('sys.implementation.cache_tag is None')\n filename=''.join([base_filename,sep,tag,suffixes[0]])\n return _path_join(head,_PYCACHE,filename)\n \n \ndef source_from_cache(path):\n ''\n\n\n\n\n\n\n \n if sys.implementation.cache_tag is None :\n raise NotImplementedError('sys.implementation.cache_tag is None')\n head,pycache_filename=_path_split(path)\n head,pycache=_path_split(head)\n if pycache !=_PYCACHE:\n raise ValueError('{} not bottom-level directory in '\n '{!r}'.format(_PYCACHE,path))\n if pycache_filename.count('.')!=2:\n raise ValueError('expected only 2 dots in '\n '{!r}'.format(pycache_filename))\n base_filename=pycache_filename.partition('.')[0]\n return _path_join(head,base_filename+SOURCE_SUFFIXES[0])\n \n \ndef _get_sourcefile(bytecode_path):\n ''\n\n\n\n\n \n if len(bytecode_path)==0:\n return None\n rest,_,extension=bytecode_path.rpartition('.')\n if not rest or extension.lower()[-3:-1]!='py':\n return bytecode_path\n try :\n source_path=source_from_cache(bytecode_path)\n except (NotImplementedError,ValueError):\n source_path=bytecode_path[:-1]\n return source_path if _path_isfile(source_path)else bytecode_path\n \n \ndef _verbose_message(message,*args,verbosity=1):\n ''\n if sys.flags.verbose >=verbosity:\n if not message.startswith(('#','import ')):\n message='# '+message\n 
print(message.format(*args),file=sys.stderr)\n \n \ndef set_package(fxn):\n ''\n def set_package_wrapper(*args,**kwargs):\n module=fxn(*args,**kwargs)\n if getattr(module,'__package__',None )is None :\n module.__package__=module.__name__\n if not hasattr(module,'__path__'):\n module.__package__=module.__package__.rpartition('.')[0]\n return module\n _wrap(set_package_wrapper,fxn)\n return set_package_wrapper\n \n \ndef set_loader(fxn):\n ''\n def set_loader_wrapper(self,*args,**kwargs):\n module=fxn(self,*args,**kwargs)\n if not hasattr(module,'__loader__'):\n module.__loader__=self\n return module\n _wrap(set_loader_wrapper,fxn)\n return set_loader_wrapper\n \n \ndef module_for_loader(fxn):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n def module_for_loader_wrapper(self,fullname,*args,**kwargs):\n module=sys.modules.get(fullname)\n is_reload=module is not None\n if not is_reload:\n \n \n \n module=new_module(fullname)\n \n \n module.__initializing__=True\n sys.modules[fullname]=module\n module.__loader__=self\n try :\n is_package=self.is_package(fullname)\n except (ImportError,AttributeError):\n pass\n else :\n if is_package:\n module.__package__=fullname\n else :\n module.__package__=fullname.rpartition('.')[0]\n else :\n module.__initializing__=True\n try :\n \n return fxn(self,module,*args,**kwargs)\n except :\n if not is_reload:\n del sys.modules[fullname]\n raise\n finally :\n module.__initializing__=False\n _wrap(module_for_loader_wrapper,fxn)\n return module_for_loader_wrapper\n \n \ndef _check_name(method):\n ''\n\n\n\n\n\n \n def _check_name_wrapper(self,name=None ,*args,**kwargs):\n if name is None :\n name=self.name\n elif self.name !=name:\n raise ImportError(\"loader cannot handle %s\"%name,name=name)\n return method(self,name,*args,**kwargs)\n _wrap(_check_name_wrapper,method)\n return _check_name_wrapper\n \n \ndef _requires_builtin(fxn):\n ''\n def _requires_builtin_wrapper(self,fullname):\n if fullname not in sys.builtin_module_names:\n raise ImportError(\"{} is not a built-in module\".format(fullname),\n name=fullname)\n return fxn(self,fullname)\n _wrap(_requires_builtin_wrapper,fxn)\n return _requires_builtin_wrapper\n \n \ndef _requires_frozen(fxn):\n ''\n def _requires_frozen_wrapper(self,fullname):\n if not _imp.is_frozen(fullname):\n raise ImportError(\"{} is not a frozen module\".format(fullname),\n name=fullname)\n return fxn(self,fullname)\n _wrap(_requires_frozen_wrapper,fxn)\n return _requires_frozen_wrapper\n \n \ndef _find_module_shim(self,fullname):\n ''\n \n \n \n \n loader,portions=self.find_loader(fullname)\n if loader is None and len(portions):\n msg=\"Not importing directory {}: missing __init__\"\n _warnings.warn(msg.format(portions[0]),ImportWarning)\n return loader\n \n \n \n \n \n \nclass BuiltinImporter:\n\n ''\n\n\n\n\n \n \n @classmethod\n def module_repr(cls,module):\n return\"\".format(module.__name__)\n \n @classmethod\n def find_module(cls,fullname,path=None ):\n ''\n\n\n\n \n if path is not None :\n return None\n return cls if _imp.is_builtin(fullname)else None\n \n @classmethod\n @set_package\n @set_loader\n @_requires_builtin\n def load_module(cls,fullname):\n ''\n is_reload=fullname in sys.modules\n try :\n return _call_with_frames_removed(_imp.init_builtin,fullname)\n except :\n if not is_reload and fullname in sys.modules:\n del sys.modules[fullname]\n raise\n \n @classmethod\n @_requires_builtin\n def get_code(cls,fullname):\n ''\n return None\n \n @classmethod\n @_requires_builtin\n def get_source(cls,fullname):\n ''\n return None\n \n 
@classmethod\n @_requires_builtin\n def is_package(cls,fullname):\n ''\n return False\n \n \nclass FrozenImporter:\n\n ''\n\n\n\n\n \n \n @classmethod\n def module_repr(cls,m):\n return\"\".format(m.__name__)\n \n @classmethod\n def find_module(cls,fullname,path=None ):\n ''\n return cls if _imp.is_frozen(fullname)else None\n \n @classmethod\n @set_package\n @set_loader\n @_requires_frozen\n def load_module(cls,fullname):\n ''\n is_reload=fullname in sys.modules\n try :\n m=_call_with_frames_removed(_imp.init_frozen,fullname)\n \n del m.__file__\n return m\n except :\n if not is_reload and fullname in sys.modules:\n del sys.modules[fullname]\n raise\n \n @classmethod\n @_requires_frozen\n def get_code(cls,fullname):\n ''\n return _imp.get_frozen_object(fullname)\n \n @classmethod\n @_requires_frozen\n def get_source(cls,fullname):\n ''\n return None\n \n @classmethod\n @_requires_frozen\n def is_package(cls,fullname):\n ''\n return _imp.is_frozen_package(fullname)\n \n \nclass WindowsRegistryFinder:\n\n ''\n \n \n REGISTRY_KEY=(\n \"Software\\\\Python\\\\PythonCore\\\\{sys_version}\"\n \"\\\\Modules\\\\{fullname}\")\n REGISTRY_KEY_DEBUG=(\n \"Software\\\\Python\\\\PythonCore\\\\{sys_version}\"\n \"\\\\Modules\\\\{fullname}\\\\Debug\")\n DEBUG_BUILD=False\n \n @classmethod\n def _open_registry(cls,key):\n try :\n return _winreg.OpenKey(_winreg.HKEY_CURRENT_USER,key)\n except WindowsError:\n return _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE,key)\n \n @classmethod\n def _search_registry(cls,fullname):\n if cls.DEBUG_BUILD:\n registry_key=cls.REGISTRY_KEY_DEBUG\n else :\n registry_key=cls.REGISTRY_KEY\n key=registry_key.format(fullname=fullname,\n sys_version=sys.version[:3])\n try :\n with cls._open_registry(key)as hkey:\n filepath=_winreg.QueryValue(hkey,\"\")\n except WindowsError:\n return None\n return filepath\n \n @classmethod\n def find_module(cls,fullname,path=None ):\n ''\n filepath=cls._search_registry(fullname)\n if filepath is None :\n return None\n try :\n _os.stat(filepath)\n except OSError:\n return None\n for loader,suffixes in _get_supported_file_loaders():\n if filepath.endswith(tuple(suffixes)):\n return loader(fullname,filepath)\n \n \nclass _LoaderBasics:\n\n ''\n \n \n def is_package(self,fullname):\n ''\n \n filename=_path_split(self.get_filename(fullname))[1]\n filename_base=filename.rsplit('.',1)[0]\n tail_name=fullname.rpartition('.')[2]\n return filename_base =='__init__'and tail_name !='__init__'\n \n def _bytes_from_bytecode(self,fullname,data,bytecode_path,source_stats):\n ''\n\n\n\n\n \n magic=data[:4]\n raw_timestamp=data[4:8]\n raw_size=data[8:12]\n if magic !=_MAGIC_BYTES:\n msg='bad magic number in {!r}: {!r}'.format(fullname,magic)\n _verbose_message(msg)\n raise ImportError(msg,name=fullname,path=bytecode_path)\n elif len(raw_timestamp)!=4:\n message='bad timestamp in {}'.format(fullname)\n _verbose_message(message)\n raise EOFError(message)\n elif len(raw_size)!=4:\n message='bad size in {}'.format(fullname)\n _verbose_message(message)\n raise EOFError(message)\n if source_stats is not None :\n try :\n source_mtime=int(source_stats['mtime'])\n except KeyError:\n pass\n else :\n if _r_long(raw_timestamp)!=source_mtime:\n message='bytecode is stale for {}'.format(fullname)\n _verbose_message(message)\n raise ImportError(message,name=fullname,\n path=bytecode_path)\n try :\n source_size=source_stats['size']&0xFFFFFFFF\n except KeyError:\n pass\n else :\n if _r_long(raw_size)!=source_size:\n raise ImportError(\n \"bytecode is stale for 
{}\".format(fullname),\n name=fullname,path=bytecode_path)\n \n \n return data[12:]\n \n @module_for_loader\n def _load_module(self,module,*,sourceless=False ):\n ''\n \n name=module.__name__\n code_object=self.get_code(name)\n module.__file__=self.get_filename(name)\n if not sourceless:\n try :\n module.__cached__=cache_from_source(module.__file__)\n except NotImplementedError:\n module.__cached__=module.__file__\n else :\n module.__cached__=module.__file__\n module.__package__=name\n if self.is_package(name):\n module.__path__=[_path_split(module.__file__)[0]]\n else :\n module.__package__=module.__package__.rpartition('.')[0]\n module.__loader__=self\n _call_with_frames_removed(exec,code_object,module.__dict__)\n return module\n \n \nclass SourceLoader(_LoaderBasics):\n\n def path_mtime(self,path):\n ''\n\n \n raise NotImplementedError\n \n def path_stats(self,path):\n ''\n\n\n\n\n\n\n\n \n return {'mtime':self.path_mtime(path)}\n \n def _cache_bytecode(self,source_path,cache_path,data):\n ''\n\n\n\n\n \n \n return self.set_data(cache_path,data)\n \n def set_data(self,path,data):\n ''\n\n\n\n \n raise NotImplementedError\n \n \n def get_source(self,fullname):\n ''\n import tokenize\n path=self.get_filename(fullname)\n try :\n source_bytes=self.get_data(path)\n except IOError as exc:\n raise ImportError(\"source not available through get_data()\",\n name=fullname)from exc\n readsource=_io.BytesIO(source_bytes).readline\n try :\n encoding=tokenize.detect_encoding(readsource)\n except SyntaxError as exc:\n raise ImportError(\"Failed to detect encoding\",\n name=fullname)from exc\n newline_decoder=_io.IncrementalNewlineDecoder(None ,True )\n try :\n return newline_decoder.decode(source_bytes.decode(encoding[0]))\n except UnicodeDecodeError as exc:\n raise ImportError(\"Failed to decode source file\",\n name=fullname)from exc\n \n def get_code(self,fullname):\n ''\n\n\n\n\n \n source_path=self.get_filename(fullname)\n source_mtime=None\n try :\n bytecode_path=cache_from_source(source_path)\n except NotImplementedError:\n bytecode_path=None\n else :\n try :\n st=self.path_stats(source_path)\n except NotImplementedError:\n pass\n else :\n source_mtime=int(st['mtime'])\n try :\n data=self.get_data(bytecode_path)\n except IOError:\n pass\n else :\n try :\n bytes_data=self._bytes_from_bytecode(fullname,data,\n bytecode_path,\n st)\n except (ImportError,EOFError):\n pass\n else :\n _verbose_message('{} matches {}',bytecode_path,\n source_path)\n found=marshal.loads(bytes_data)\n if isinstance(found,_code_type):\n _imp._fix_co_filename(found,source_path)\n _verbose_message('code object from {}',\n bytecode_path)\n return found\n else :\n msg=\"Non-code object in {}\"\n raise ImportError(msg.format(bytecode_path),\n name=fullname,path=bytecode_path)\n source_bytes=self.get_data(source_path)\n code_object=_call_with_frames_removed(compile,\n source_bytes,source_path,'exec',\n dont_inherit=True )\n _verbose_message('code object from {}',source_path)\n if (not sys.dont_write_bytecode and bytecode_path is not None and\n source_mtime is not None ):\n data=bytearray(_MAGIC_BYTES)\n data.extend(_w_long(source_mtime))\n data.extend(_w_long(len(source_bytes)))\n data.extend(marshal.dumps(code_object))\n try :\n self._cache_bytecode(source_path,bytecode_path,data)\n _verbose_message('wrote {!r}',bytecode_path)\n except NotImplementedError:\n pass\n return code_object\n \n def load_module(self,fullname):\n ''\n\n\n\n\n\n \n return self._load_module(fullname)\n \n \nclass FileLoader:\n\n ''\n \n \n def 
__init__(self,fullname,path):\n ''\n \n self.name=fullname\n self.path=path\n \n @_check_name\n def load_module(self,fullname):\n ''\n \n \n return super(FileLoader,self).load_module(fullname)\n \n @_check_name\n def get_filename(self,fullname):\n ''\n return self.path\n \n def get_data(self,path):\n ''\n with _io.FileIO(path,'r')as file:\n return file.read()\n \n \nclass SourceFileLoader(FileLoader,SourceLoader):\n\n ''\n \n def path_stats(self,path):\n ''\n st=_os.stat(path)\n return {'mtime':st.st_mtime,'size':st.st_size}\n \n def _cache_bytecode(self,source_path,bytecode_path,data):\n \n try :\n mode=_os.stat(source_path).st_mode\n except OSError:\n mode=0o666\n \n \n mode |=0o200\n return self.set_data(bytecode_path,data,_mode=mode)\n \n def set_data(self,path,data,*,_mode=0o666):\n ''\n parent,filename=_path_split(path)\n path_parts=[]\n \n while parent and not _path_isdir(parent):\n parent,part=_path_split(parent)\n path_parts.append(part)\n \n for part in reversed(path_parts):\n parent=_path_join(parent,part)\n try :\n _os.mkdir(parent)\n except FileExistsError:\n \n continue\n except OSError as exc:\n \n \n _verbose_message('could not create {!r}: {!r}',parent,exc)\n return\n try :\n _write_atomic(path,data,_mode)\n _verbose_message('created {!r}',path)\n except OSError as exc:\n \n _verbose_message('could not create {!r}: {!r}',path,exc)\n \n \nclass SourcelessFileLoader(FileLoader,_LoaderBasics):\n\n ''\n \n def load_module(self,fullname):\n return self._load_module(fullname,sourceless=True )\n \n def get_code(self,fullname):\n path=self.get_filename(fullname)\n data=self.get_data(path)\n bytes_data=self._bytes_from_bytecode(fullname,data,path,None )\n found=marshal.loads(bytes_data)\n if isinstance(found,_code_type):\n _verbose_message('code object from {!r}',path)\n return found\n else :\n raise ImportError(\"Non-code object in {}\".format(path),\n name=fullname,path=path)\n \n def get_source(self,fullname):\n ''\n return None\n \n \n \nEXTENSION_SUFFIXES=[]\n\n\nclass ExtensionFileLoader:\n\n ''\n\n\n\n \n \n def __init__(self,name,path):\n self.name=name\n self.path=path\n \n @_check_name\n @set_package\n @set_loader\n def load_module(self,fullname):\n ''\n is_reload=fullname in sys.modules\n try :\n module=_call_with_frames_removed(_imp.load_dynamic,\n fullname,self.path)\n _verbose_message('extension module loaded from {!r}',self.path)\n if self.is_package(fullname)and not hasattr(module,'__path__'):\n module.__path__=[_path_split(self.path)[0]]\n return module\n except :\n if not is_reload and fullname in sys.modules:\n del sys.modules[fullname]\n raise\n \n def is_package(self,fullname):\n ''\n file_name=_path_split(self.path)[1]\n return any(file_name =='__init__'+suffix\n for suffix in EXTENSION_SUFFIXES)\n \n def get_code(self,fullname):\n ''\n return None\n \n def get_source(self,fullname):\n ''\n return None\n \n \nclass _NamespacePath:\n ''\n\n\n\n \n \n def __init__(self,name,path,path_finder):\n self._name=name\n self._path=path\n self._last_parent_path=tuple(self._get_parent_path())\n self._path_finder=path_finder\n \n def _find_parent_path_names(self):\n ''\n parent,dot,me=self._name.rpartition('.')\n if dot =='':\n \n return'sys','path'\n \n \n return parent,'__path__'\n \n def _get_parent_path(self):\n parent_module_name,path_attr_name=self._find_parent_path_names()\n return getattr(sys.modules[parent_module_name],path_attr_name)\n \n def _recalculate(self):\n \n parent_path=tuple(self._get_parent_path())\n if parent_path !=self._last_parent_path:\n 
loader,new_path=self._path_finder(self._name,parent_path)\n \n \n if loader is None :\n self._path=new_path\n self._last_parent_path=parent_path\n return self._path\n \n def __iter__(self):\n return iter(self._recalculate())\n \n def __len__(self):\n return len(self._recalculate())\n \n def __repr__(self):\n return\"_NamespacePath({!r})\".format(self._path)\n \n def __contains__(self,item):\n return item in self._recalculate()\n \n def append(self,item):\n self._path.append(item)\n \n \nclass NamespaceLoader:\n def __init__(self,name,path,path_finder):\n self._path=_NamespacePath(name,path,path_finder)\n \n @classmethod\n def module_repr(cls,module):\n return\"\".format(module.__name__)\n \n @module_for_loader\n def load_module(self,module):\n ''\n _verbose_message('namespace module loaded with path {!r}',self._path)\n module.__path__=self._path\n return module\n \n \n \n \nclass PathFinder:\n\n ''\n \n @classmethod\n def invalidate_caches(cls):\n ''\n \n for finder in sys.path_importer_cache.values():\n if hasattr(finder,'invalidate_caches'):\n finder.invalidate_caches()\n \n @classmethod\n def _path_hooks(cls,path):\n ''\n\n\n\n \n if not sys.path_hooks:\n _warnings.warn('sys.path_hooks is empty',ImportWarning)\n for hook in sys.path_hooks:\n try :\n return hook(path)\n except ImportError:\n continue\n else :\n return None\n \n @classmethod\n def _path_importer_cache(cls,path):\n ''\n\n\n\n\n \n if path =='':\n path='.'\n try :\n finder=sys.path_importer_cache[path]\n except KeyError:\n finder=cls._path_hooks(path)\n sys.path_importer_cache[path]=finder\n return finder\n \n @classmethod\n def _get_loader(cls,fullname,path):\n ''\n \n \n namespace_path=[]\n for entry in path:\n if not isinstance(entry,(str,bytes)):\n continue\n finder=cls._path_importer_cache(entry)\n if finder is not None :\n if hasattr(finder,'find_loader'):\n loader,portions=finder.find_loader(fullname)\n else :\n loader=finder.find_module(fullname)\n portions=[]\n if loader is not None :\n \n return loader,namespace_path\n \n \n \n \n namespace_path.extend(portions)\n else :\n return None ,namespace_path\n \n @classmethod\n def find_module(cls,fullname,path=None ):\n ''\n \n if path is None :\n path=sys.path\n loader,namespace_path=cls._get_loader(fullname,path)\n if loader is not None :\n return loader\n else :\n if namespace_path:\n \n \n return NamespaceLoader(fullname,namespace_path,cls._get_loader)\n else :\n return None\n \n \nclass FileFinder:\n\n ''\n\n\n\n\n \n \n def __init__(self,path,*loader_details):\n ''\n\n \n loaders=[]\n for loader,suffixes in loader_details:\n loaders.extend((suffix,loader)for suffix in suffixes)\n self._loaders=loaders\n \n self.path=path or'.'\n self._path_mtime=-1\n self._path_cache=set()\n self._relaxed_path_cache=set()\n \n def invalidate_caches(self):\n ''\n self._path_mtime=-1\n \n find_module=_find_module_shim\n \n def find_loader(self,fullname):\n ''\n \n is_namespace=False\n tail_module=fullname.rpartition('.')[2]\n try :\n mtime=_os.stat(self.path).st_mtime\n except OSError:\n mtime=-1\n if mtime !=self._path_mtime:\n self._fill_cache()\n self._path_mtime=mtime\n \n if _relax_case():\n cache=self._relaxed_path_cache\n cache_module=tail_module.lower()\n else :\n cache=self._path_cache\n cache_module=tail_module\n \n if cache_module in cache:\n base_path=_path_join(self.path,tail_module)\n if _path_isdir(base_path):\n for suffix,loader in self._loaders:\n init_filename='__init__'+suffix\n full_path=_path_join(base_path,init_filename)\n if _path_isfile(full_path):\n return 
(loader(fullname,full_path),[base_path])\n else :\n \n \n is_namespace=True\n \n for suffix,loader in self._loaders:\n full_path=_path_join(self.path,tail_module+suffix)\n _verbose_message('trying {}'.format(full_path),verbosity=2)\n if cache_module+suffix in cache:\n if _path_isfile(full_path):\n return (loader(fullname,full_path),[])\n if is_namespace:\n _verbose_message('possible namespace for {}'.format(base_path))\n return (None ,[base_path])\n return (None ,[])\n \n def _fill_cache(self):\n ''\n path=self.path\n try :\n contents=_os.listdir(path)\n except (FileNotFoundError,PermissionError,NotADirectoryError):\n \n \n contents=[]\n \n \n if not sys.platform.startswith('win'):\n self._path_cache=set(contents)\n else :\n \n \n \n \n \n lower_suffix_contents=set()\n for item in contents:\n name,dot,suffix=item.partition('.')\n if dot:\n new_name='{}.{}'.format(name,suffix.lower())\n else :\n new_name=name\n lower_suffix_contents.add(new_name)\n self._path_cache=lower_suffix_contents\n if sys.platform.startswith(_CASE_INSENSITIVE_PLATFORMS):\n self._relaxed_path_cache=set(fn.lower()for fn in contents)\n \n @classmethod\n def path_hook(cls,*loader_details):\n ''\n\n\n\n\n\n\n \n def path_hook_for_FileFinder(path):\n ''\n if not _path_isdir(path):\n raise ImportError(\"only directories are supported\",path=path)\n return cls(path,*loader_details)\n \n return path_hook_for_FileFinder\n \n def __repr__(self):\n return\"FileFinder(%r)\"%(self.path,)\n \n \n \n \nclass _ImportLockContext:\n\n ''\n \n def __enter__(self):\n ''\n _imp.acquire_lock()\n \n def __exit__(self,exc_type,exc_value,exc_traceback):\n ''\n _imp.release_lock()\n \n \ndef _resolve_name(name,package,level):\n ''\n bits=package.rsplit('.',level -1)\n if len(bits)= 0')\n if package:\n if not isinstance(package,str):\n raise TypeError(\"__package__ not set to a string\")\n elif package not in sys.modules:\n msg=(\"Parent module {!r} not loaded, cannot perform relative \"\n \"import\")\n raise SystemError(msg.format(package))\n if not name and level ==0:\n raise ValueError(\"Empty module name\")\n \n \n_ERR_MSG='No module named {!r}'\n\ndef _find_and_load_unlocked(name,import_):\n path=None\n parent=name.rpartition('.')[0]\n if parent:\n if parent not in sys.modules:\n _call_with_frames_removed(import_,parent)\n \n if name in sys.modules:\n return sys.modules[name]\n \n parent_module=sys.modules[parent]\n try :\n path=parent_module.__path__\n except AttributeError:\n msg=(_ERR_MSG+'; {} is not a package').format(name,parent)\n raise ImportError(msg,name=name)\n loader=_find_module(name,path)\n if loader is None :\n exc=ImportError(_ERR_MSG.format(name),name=name)\n \n \n exc._not_found=True\n raise exc\n elif name not in sys.modules:\n \n loader.load_module(name)\n _verbose_message('import {!r} # {!r}',name,loader)\n \n module=sys.modules[name]\n if parent:\n \n parent_module=sys.modules[parent]\n setattr(parent_module,name.rpartition('.')[2],module)\n \n if getattr(module,'__package__',None )is None :\n try :\n module.__package__=module.__name__\n if not hasattr(module,'__path__'):\n module.__package__=module.__package__.rpartition('.')[0]\n except AttributeError:\n pass\n \n if not hasattr(module,'__loader__'):\n try :\n module.__loader__=loader\n except AttributeError:\n pass\n return module\n \n \ndef _find_and_load(name,import_):\n ''\n try :\n lock=_get_module_lock(name)\n finally :\n _imp.release_lock()\n lock.acquire()\n try :\n return _find_and_load_unlocked(name,import_)\n finally :\n lock.release()\n \n \ndef 
_gcd_import(name,package=None ,level=0):\n ''\n\n\n\n\n\n\n \n _sanity_check(name,package,level)\n if level >0:\n name=_resolve_name(name,package,level)\n _imp.acquire_lock()\n if name not in sys.modules:\n return _find_and_load(name,_gcd_import)\n module=sys.modules[name]\n if module is None :\n _imp.release_lock()\n message=(\"import of {} halted; \"\n \"None in sys.modules\".format(name))\n raise ImportError(message,name=name)\n _lock_unlock_module(name)\n return module\n \ndef _handle_fromlist(module,fromlist,import_):\n ''\n\n\n\n\n\n \n \n \n if hasattr(module,'__path__'):\n if'*'in fromlist:\n fromlist=list(fromlist)\n fromlist.remove('*')\n if hasattr(module,'__all__'):\n fromlist.extend(module.__all__)\n for x in fromlist:\n if not hasattr(module,x):\n from_name='{}.{}'.format(module.__name__,x)\n try :\n _call_with_frames_removed(import_,from_name)\n except ImportError as exc:\n \n \n \n \n \n if getattr(exc,'_not_found',False ):\n if exc.name ==from_name:\n continue\n raise\n return module\n \n \ndef _calc___package__(globals):\n ''\n\n\n\n\n \n package=globals.get('__package__')\n if package is None :\n package=globals['__name__']\n if'__path__'not in globals:\n package=package.rpartition('.')[0]\n return package\n \n \ndef _get_supported_file_loaders():\n ''\n\n\n \n extensions=ExtensionFileLoader,_imp.extension_suffixes()\n source=SourceFileLoader,SOURCE_SUFFIXES\n bytecode=SourcelessFileLoader,BYTECODE_SUFFIXES\n return [extensions,source,bytecode]\n \n \ndef __import__(name,globals=None ,locals=None ,fromlist=(),level=0,blocking=True ):\n ''\n\n\n\n\n\n\n\n\n \n if level ==0:\n module=_gcd_import(name)\n else :\n globals_=globals if globals is not None else {}\n package=_calc___package__(globals_)\n module=_gcd_import(name,package,level)\n if not fromlist:\n \n \n if level ==0:\n return _gcd_import(name.partition('.')[0])\n elif not name:\n return module\n else :\n \n \n cut_off=len(name)-len(name.partition('.')[0])\n \n \n return sys.modules[module.__name__[:len(module.__name__)-cut_off]]\n else :\n return _handle_fromlist(module,fromlist,_gcd_import)\n \n \n \ndef _setup(sys_module,_imp_module):\n ''\n\n\n\n\n\n \n \n global _imp,sys,BYTECODE_SUFFIXES\n _imp=_imp_module\n sys=sys_module\n \n if sys.flags.optimize:\n BYTECODE_SUFFIXES=OPTIMIZED_BYTECODE_SUFFIXES\n else :\n BYTECODE_SUFFIXES=DEBUG_BYTECODE_SUFFIXES\n \n module_type=type(sys)\n for name,module in sys.modules.items():\n if isinstance(module,module_type):\n if not hasattr(module,'__loader__'):\n if name in sys.builtin_module_names:\n module.__loader__=BuiltinImporter\n \n \n \n \n self_module=sys.modules[__name__]\n for builtin_name in ('_io','_warnings','builtins'):\n if builtin_name not in sys.modules:\n builtin_module=BuiltinImporter.load_module(builtin_name)\n else :\n builtin_module=sys.modules[builtin_name]\n setattr(self_module,builtin_name,builtin_module)\n \n os_details=('posix',['/']),('nt',['\\\\','/']),('os2',['\\\\','/'])\n for builtin_os,path_separators in os_details:\n \n assert all(len(sep)==1 for sep in path_separators)\n path_sep=path_separators[0]\n if builtin_os in sys.modules:\n os_module=sys.modules[builtin_os]\n break\n else :\n try :\n os_module=BuiltinImporter.load_module(builtin_os)\n \n if builtin_os =='os2'and'EMX GCC'in sys.version:\n path_sep=path_separators[1]\n break\n except ImportError:\n continue\n else :\n raise ImportError('importlib requires posix or nt')\n \n try :\n thread_module=BuiltinImporter.load_module('_thread')\n except ImportError:\n \n thread_module=None\n 
weakref_module=BuiltinImporter.load_module('_weakref')\n \n if builtin_os =='nt':\n winreg_module=BuiltinImporter.load_module('winreg')\n setattr(self_module,'_winreg',winreg_module)\n \n setattr(self_module,'_os',os_module)\n setattr(self_module,'_thread',thread_module)\n setattr(self_module,'_weakref',weakref_module)\n setattr(self_module,'path_sep',path_sep)\n setattr(self_module,'path_separators',set(path_separators))\n \n setattr(self_module,'_relax_case',_make_relax_case())\n EXTENSION_SUFFIXES.extend(_imp.extension_suffixes())\n if builtin_os =='nt':\n SOURCE_SUFFIXES.append('.pyw')\n if'_d.pyd'in EXTENSION_SUFFIXES:\n WindowsRegistryFinder.DEBUG_BUILD=True\n \ndef _install(sys_module,_imp_module):\n ''\n _setup(sys_module,_imp_module)\n supported_loaders=_get_supported_file_loaders()\n sys.path_hooks.extend([FileFinder.path_hook(*supported_loaders)])\n sys.meta_path.append(BuiltinImporter)\n sys.meta_path.append(FrozenImporter)\n if _os.__name__ =='nt':\n sys.meta_path.append(WindowsRegistryFinder)\n sys.meta_path.append(PathFinder)\n"], "locale": [".py", "''\n\n\n\n\n\n\n\n\n\n\n\n\nimport sys\nimport encodings\nimport encodings.aliases\nimport re\nimport collections\nfrom builtins import str as _builtin_str\nimport functools\n\n\n\n\n\n\n\n__all__=[\"getlocale\",\"getdefaultlocale\",\"getpreferredencoding\",\"Error\",\n\"setlocale\",\"resetlocale\",\"localeconv\",\"strcoll\",\"strxfrm\",\n\"str\",\"atof\",\"atoi\",\"format\",\"format_string\",\"currency\",\n\"normalize\",\"LC_CTYPE\",\"LC_COLLATE\",\"LC_TIME\",\"LC_MONETARY\",\n\"LC_NUMERIC\",\"LC_ALL\",\"CHAR_MAX\"]\n\ndef _strcoll(a,b):\n ''\n\n \n return (a >b)-(a .*?)\\))?'\nr'(?P[-#0-9 +*.hlL]*?)[eEfFgGdiouxXcrs%]')\n\ndef format(percent,value,grouping=False ,monetary=False ,*additional):\n ''\n\n\n\n \n \n match=_percent_re.match(percent)\n if not match or len(match.group())!=len(percent):\n raise ValueError((\"format() must be given exactly one %%char \"\n \"format specifier, %s not valid\")%repr(percent))\n return _format(percent,value,grouping,monetary,*additional)\n \ndef _format(percent,value,grouping=False ,monetary=False ,*additional):\n if additional:\n formatted=percent %((value,)+additional)\n else :\n formatted=percent %value\n \n if percent[-1]in'eEfFgG':\n seps=0\n parts=formatted.split('.')\n if grouping:\n parts[0],seps=_group(parts[0],monetary=monetary)\n decimal_point=localeconv()[monetary and'mon_decimal_point'\n or'decimal_point']\n formatted=decimal_point.join(parts)\n if seps:\n formatted=_strip_padding(formatted,seps)\n elif percent[-1]in'diu':\n seps=0\n if grouping:\n formatted,seps=_group(formatted,monetary=monetary)\n if seps:\n formatted=_strip_padding(formatted,seps)\n return formatted\n \ndef format_string(f,val,grouping=False ):\n ''\n\n \n percents=list(_percent_re.finditer(f))\n new_f=_percent_re.sub('%s',f)\n \n if isinstance(val,collections.Mapping):\n new_val=[]\n for perc in percents:\n if perc.group()[-1]=='%':\n new_val.append('%')\n else :\n new_val.append(format(perc.group(),val,grouping))\n else :\n if not isinstance(val,tuple):\n val=(val,)\n new_val=[]\n i=0\n for perc in percents:\n if perc.group()[-1]=='%':\n new_val.append('%')\n else :\n starcount=perc.group('modifiers').count('*')\n new_val.append(_format(perc.group(),\n val[i],\n grouping,\n False ,\n *val[i+1:i+1+starcount]))\n i +=(1+starcount)\n val=tuple(new_val)\n \n return new_f %val\n \ndef currency(val,symbol=True ,grouping=False ,international=False ):\n ''\n \n conv=localeconv()\n \n \n digits=conv[international 
and'int_frac_digits'or'frac_digits']\n if digits ==127:\n raise ValueError(\"Currency formatting is not possible using \"\n \"the 'C' locale.\")\n \n s=format('%%.%if'%digits,abs(val),grouping,monetary=True )\n \n s='<'+s+'>'\n \n if symbol:\n smb=conv[international and'int_curr_symbol'or'currency_symbol']\n precedes=conv[val <0 and'n_cs_precedes'or'p_cs_precedes']\n separated=conv[val <0 and'n_sep_by_space'or'p_sep_by_space']\n \n if precedes:\n s=smb+(separated and' 'or'')+s\n else :\n s=s+(separated and' 'or'')+smb\n \n sign_pos=conv[val <0 and'n_sign_posn'or'p_sign_posn']\n sign=conv[val <0 and'negative_sign'or'positive_sign']\n \n if sign_pos ==0:\n s='('+s+')'\n elif sign_pos ==1:\n s=sign+s\n elif sign_pos ==2:\n s=s+sign\n elif sign_pos ==3:\n s=s.replace('<',sign)\n elif sign_pos ==4:\n s=s.replace('>',sign)\n else :\n \n \n s=sign+s\n \n return s.replace('<','').replace('>','')\n \ndef str(val):\n ''\n return format(\"%.12g\",val)\n \ndef atof(string,func=float):\n ''\n \n ts=localeconv()['thousands_sep']\n if ts:\n string=string.replace(ts,'')\n \n dd=localeconv()['decimal_point']\n if dd:\n string=string.replace(dd,'.')\n \n return func(string)\n \ndef atoi(str):\n ''\n return atof(str,int)\n \ndef _test():\n setlocale(LC_ALL,\"\")\n \n s1=format(\"%d\",123456789,1)\n print(s1,\"is\",atoi(s1))\n \n s1=str(3.14)\n print(s1,\"is\",atof(s1))\n \n \n \n \n \n \n \n \n_setlocale=setlocale\n\ndef _replace_encoding(code,encoding):\n if'.'in code:\n langname=code[:code.index('.')]\n else :\n langname=code\n \n norm_encoding=encodings.normalize_encoding(encoding)\n \n norm_encoding=encodings.aliases.aliases.get(norm_encoding.lower(),\n norm_encoding)\n \n encoding=norm_encoding\n norm_encoding=norm_encoding.lower()\n if norm_encoding in locale_encoding_alias:\n encoding=locale_encoding_alias[norm_encoding]\n else :\n norm_encoding=norm_encoding.replace('_','')\n norm_encoding=norm_encoding.replace('-','')\n if norm_encoding in locale_encoding_alias:\n encoding=locale_encoding_alias[norm_encoding]\n \n return langname+'.'+encoding\n \ndef _append_modifier(code,modifier):\n if modifier =='euro':\n if'.'not in code:\n return code+'.ISO8859-15'\n _,_,encoding=code.partition('.')\n if encoding in ('ISO8859-15','UTF-8'):\n return code\n if encoding =='ISO8859-1':\n return _replace_encoding(code,'ISO8859-15')\n return code+'@'+modifier\n \ndef normalize(localename):\n\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n code=localename.lower()\n if':'in code:\n \n code=code.replace(':','.')\n if'@'in code:\n code,modifier=code.split('@',1)\n else :\n modifier=''\n if'.'in code:\n langname,encoding=code.split('.')[:2]\n else :\n langname=code\n encoding=''\n \n \n lang_enc=langname\n if encoding:\n norm_encoding=encoding.replace('-','')\n norm_encoding=norm_encoding.replace('_','')\n lang_enc +='.'+norm_encoding\n lookup_name=lang_enc\n if modifier:\n lookup_name +='@'+modifier\n code=locale_alias.get(lookup_name,None )\n if code is not None :\n return code\n \n \n if modifier:\n \n code=locale_alias.get(lang_enc,None )\n if code is not None :\n \n if'@'not in code:\n return _append_modifier(code,modifier)\n if code.split('@',1)[1].lower()==modifier:\n return code\n \n \n if encoding:\n \n lookup_name=langname\n if modifier:\n lookup_name +='@'+modifier\n code=locale_alias.get(lookup_name,None )\n if code is not None :\n \n if'@'not in code:\n return _replace_encoding(code,encoding)\n code,modifier=code.split('@',1)\n return _replace_encoding(code,encoding)+'@'+modifier\n \n if modifier:\n \n 
code=locale_alias.get(langname,None )\n if code is not None :\n \n if'@'not in code:\n code=_replace_encoding(code,encoding)\n return _append_modifier(code,modifier)\n code,defmod=code.split('@',1)\n if defmod.lower()==modifier:\n return _replace_encoding(code,encoding)+'@'+defmod\n \n return localename\n \ndef _parse_localename(localename):\n\n ''\n\n\n\n\n\n\n\n\n\n\n \n code=normalize(localename)\n if'@'in code:\n \n code,modifier=code.split('@',1)\n if modifier =='euro'and'.'not in code:\n \n \n \n return code,'iso-8859-15'\n \n if'.'in code:\n return tuple(code.split('.')[:2])\n elif code =='C':\n return None ,None\n raise ValueError('unknown locale: %s'%localename)\n \ndef _build_localename(localetuple):\n\n ''\n\n\n\n\n \n try :\n language,encoding=localetuple\n \n if language is None :\n language='C'\n if encoding is None :\n return language\n else :\n return language+'.'+encoding\n except (TypeError,ValueError):\n raise TypeError('Locale must be None, a string, or an iterable of two strings -- language code, encoding.')\n \ndef getdefaultlocale(envvars=('LC_ALL','LC_CTYPE','LANG','LANGUAGE')):\n\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n try :\n \n import _locale\n code,encoding=_locale._getdefaultlocale()\n except (ImportError,AttributeError):\n pass\n else :\n \n if sys.platform ==\"win32\"and code and code[:2]==\"0x\":\n \n code=windows_locale.get(int(code,0))\n \n \n return code,encoding\n \n \n import os\n lookup=os.environ.get\n for variable in envvars:\n localename=lookup(variable,None )\n if localename:\n if variable =='LANGUAGE':\n localename=localename.split(':')[0]\n break\n else :\n localename='C'\n return _parse_localename(localename)\n \n \ndef getlocale(category=LC_CTYPE):\n\n ''\n\n\n\n\n\n\n\n\n\n \n localename=_setlocale(category)\n if category ==LC_ALL and';'in localename:\n raise TypeError('category LC_ALL is not supported')\n return _parse_localename(localename)\n \ndef setlocale(category,locale=None ):\n\n ''\n\n\n\n\n\n\n\n\n \n if locale and not isinstance(locale,_builtin_str):\n \n locale=normalize(_build_localename(locale))\n return _setlocale(category,locale)\n \ndef resetlocale(category=LC_ALL):\n\n ''\n\n\n\n\n \n _setlocale(category,_build_localename(getdefaultlocale()))\n \nif sys.platform.startswith(\"win\"):\n\n def getpreferredencoding(do_setlocale=True ):\n ''\n import _bootlocale\n return _bootlocale.getpreferredencoding(False )\nelse :\n\n try :\n CODESET\n except NameError:\n \n def getpreferredencoding(do_setlocale=True ):\n ''\n \n res=getdefaultlocale()[1]\n if res is None :\n \n res='ascii'\n return res\n else :\n def getpreferredencoding(do_setlocale=True ):\n ''\n \n import _bootlocale\n if do_setlocale:\n oldloc=setlocale(LC_CTYPE)\n try :\n setlocale(LC_CTYPE,\"\")\n except Error:\n pass\n result=_bootlocale.getpreferredencoding(False )\n if do_setlocale:\n setlocale(LC_CTYPE,oldloc)\n return result\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n 
\nlocale_encoding_alias={\n\n\n'437':'C',\n'c':'C',\n'en':'ISO8859-1',\n'jis':'JIS7',\n'jis7':'JIS7',\n'ajec':'eucJP',\n'koi8c':'KOI8-C',\n'microsoftcp1251':'CP1251',\n'microsoftcp1255':'CP1255',\n'microsoftcp1256':'CP1256',\n'88591':'ISO8859-1',\n'88592':'ISO8859-2',\n'88595':'ISO8859-5',\n'885915':'ISO8859-15',\n\n\n'ascii':'ISO8859-1',\n'latin_1':'ISO8859-1',\n'iso8859_1':'ISO8859-1',\n'iso8859_10':'ISO8859-10',\n'iso8859_11':'ISO8859-11',\n'iso8859_13':'ISO8859-13',\n'iso8859_14':'ISO8859-14',\n'iso8859_15':'ISO8859-15',\n'iso8859_16':'ISO8859-16',\n'iso8859_2':'ISO8859-2',\n'iso8859_3':'ISO8859-3',\n'iso8859_4':'ISO8859-4',\n'iso8859_5':'ISO8859-5',\n'iso8859_6':'ISO8859-6',\n'iso8859_7':'ISO8859-7',\n'iso8859_8':'ISO8859-8',\n'iso8859_9':'ISO8859-9',\n'iso2022_jp':'JIS7',\n'shift_jis':'SJIS',\n'tactis':'TACTIS',\n'euc_jp':'eucJP',\n'euc_kr':'eucKR',\n'utf_8':'UTF-8',\n'koi8_r':'KOI8-R',\n'koi8_u':'KOI8-U',\n'cp1251':'CP1251',\n'cp1255':'CP1255',\n'cp1256':'CP1256',\n\n\n\n}\n\nfor k,v in sorted(locale_encoding_alias.items()):\n k=k.replace('_','')\n locale_encoding_alias.setdefault(k,v)\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \nlocale_alias={\n'a3':'az_AZ.KOI8-C',\n'a3_az':'az_AZ.KOI8-C',\n'a3_az.koic':'az_AZ.KOI8-C',\n'aa_dj':'aa_DJ.ISO8859-1',\n'aa_er':'aa_ER.UTF-8',\n'aa_et':'aa_ET.UTF-8',\n'af':'af_ZA.ISO8859-1',\n'af_za':'af_ZA.ISO8859-1',\n'am':'am_ET.UTF-8',\n'am_et':'am_ET.UTF-8',\n'american':'en_US.ISO8859-1',\n'an_es':'an_ES.ISO8859-15',\n'ar':'ar_AA.ISO8859-6',\n'ar_aa':'ar_AA.ISO8859-6',\n'ar_ae':'ar_AE.ISO8859-6',\n'ar_bh':'ar_BH.ISO8859-6',\n'ar_dz':'ar_DZ.ISO8859-6',\n'ar_eg':'ar_EG.ISO8859-6',\n'ar_in':'ar_IN.UTF-8',\n'ar_iq':'ar_IQ.ISO8859-6',\n'ar_jo':'ar_JO.ISO8859-6',\n'ar_kw':'ar_KW.ISO8859-6',\n'ar_lb':'ar_LB.ISO8859-6',\n'ar_ly':'ar_LY.ISO8859-6',\n'ar_ma':'ar_MA.ISO8859-6',\n'ar_om':'ar_OM.ISO8859-6',\n'ar_qa':'ar_QA.ISO8859-6',\n'ar_sa':'ar_SA.ISO8859-6',\n'ar_sd':'ar_SD.ISO8859-6',\n'ar_sy':'ar_SY.ISO8859-6',\n'ar_tn':'ar_TN.ISO8859-6',\n'ar_ye':'ar_YE.ISO8859-6',\n'arabic':'ar_AA.ISO8859-6',\n'as':'as_IN.UTF-8',\n'as_in':'as_IN.UTF-8',\n'ast_es':'ast_ES.ISO8859-15',\n'ayc_pe':'ayc_PE.UTF-8',\n'az':'az_AZ.ISO8859-9E',\n'az_az':'az_AZ.ISO8859-9E',\n'az_az.iso88599e':'az_AZ.ISO8859-9E',\n'be':'be_BY.CP1251',\n'be@latin':'be_BY.UTF-8@latin',\n'be_bg.utf8':'bg_BG.UTF-8',\n'be_by':'be_BY.CP1251',\n'be_by@latin':'be_BY.UTF-8@latin',\n'bem_zm':'bem_ZM.UTF-8',\n'ber_dz':'ber_DZ.UTF-8',\n'ber_ma':'ber_MA.UTF-8',\n'bg':'bg_BG.CP1251',\n'bg_bg':'bg_BG.CP1251',\n'bho_in':'bho_IN.UTF-8',\n'bn_bd':'bn_BD.UTF-8',\n'bn_in':'bn_IN.UTF-8',\n'bo_cn':'bo_CN.UTF-8',\n'bo_in':'bo_IN.UTF-8',\n'bokmal':'nb_NO.ISO8859-1',\n'bokm\\xe5l':'nb_NO.ISO8859-1',\n'br':'br_FR.ISO8859-1',\n'br_fr':'br_FR.ISO8859-1',\n'brx_in':'brx_IN.UTF-8',\n'bs':'bs_BA.ISO8859-2',\n'bs_ba':'bs_BA.ISO8859-2',\n'bulgarian':'bg_BG.CP1251',\n'byn_er':'byn_ER.UTF-8',\n'c':'C',\n'c-french':'fr_CA.ISO8859-1',\n'c.ascii':'C',\n'c.en':'C',\n'c.iso88591':'en_US.ISO8859-1',\n'c.utf8':'en_US.UTF-8',\n'c_c':'C',\n'c_c.c':'C',\n'ca':'ca_ES.ISO8859-1',\n'ca_ad':'ca_AD.ISO8859-1',\n'ca_es':'ca_ES.ISO8859-1',\n'ca_es@valencia':'ca_ES.ISO8859-15@valencia',\n'ca_fr':'ca_FR.ISO8859-1',\n'ca_it':'ca_IT.ISO8859-1',\n'catalan':'ca_ES.ISO8
859-1',\n'cextend':'en_US.ISO8859-1',\n'chinese-s':'zh_CN.eucCN',\n'chinese-t':'zh_TW.eucTW',\n'crh_ua':'crh_UA.UTF-8',\n'croatian':'hr_HR.ISO8859-2',\n'cs':'cs_CZ.ISO8859-2',\n'cs_cs':'cs_CZ.ISO8859-2',\n'cs_cz':'cs_CZ.ISO8859-2',\n'csb_pl':'csb_PL.UTF-8',\n'cv_ru':'cv_RU.UTF-8',\n'cy':'cy_GB.ISO8859-1',\n'cy_gb':'cy_GB.ISO8859-1',\n'cz':'cs_CZ.ISO8859-2',\n'cz_cz':'cs_CZ.ISO8859-2',\n'czech':'cs_CZ.ISO8859-2',\n'da':'da_DK.ISO8859-1',\n'da_dk':'da_DK.ISO8859-1',\n'danish':'da_DK.ISO8859-1',\n'dansk':'da_DK.ISO8859-1',\n'de':'de_DE.ISO8859-1',\n'de_at':'de_AT.ISO8859-1',\n'de_be':'de_BE.ISO8859-1',\n'de_ch':'de_CH.ISO8859-1',\n'de_de':'de_DE.ISO8859-1',\n'de_li.utf8':'de_LI.UTF-8',\n'de_lu':'de_LU.ISO8859-1',\n'deutsch':'de_DE.ISO8859-1',\n'doi_in':'doi_IN.UTF-8',\n'dutch':'nl_NL.ISO8859-1',\n'dutch.iso88591':'nl_BE.ISO8859-1',\n'dv_mv':'dv_MV.UTF-8',\n'dz_bt':'dz_BT.UTF-8',\n'ee':'ee_EE.ISO8859-4',\n'ee_ee':'ee_EE.ISO8859-4',\n'eesti':'et_EE.ISO8859-1',\n'el':'el_GR.ISO8859-7',\n'el_cy':'el_CY.ISO8859-7',\n'el_gr':'el_GR.ISO8859-7',\n'el_gr@euro':'el_GR.ISO8859-15',\n'en':'en_US.ISO8859-1',\n'en_ag':'en_AG.UTF-8',\n'en_au':'en_AU.ISO8859-1',\n'en_be':'en_BE.ISO8859-1',\n'en_bw':'en_BW.ISO8859-1',\n'en_ca':'en_CA.ISO8859-1',\n'en_dk':'en_DK.ISO8859-1',\n'en_dl.utf8':'en_DL.UTF-8',\n'en_gb':'en_GB.ISO8859-1',\n'en_hk':'en_HK.ISO8859-1',\n'en_ie':'en_IE.ISO8859-1',\n'en_in':'en_IN.ISO8859-1',\n'en_ng':'en_NG.UTF-8',\n'en_nz':'en_NZ.ISO8859-1',\n'en_ph':'en_PH.ISO8859-1',\n'en_sg':'en_SG.ISO8859-1',\n'en_uk':'en_GB.ISO8859-1',\n'en_us':'en_US.ISO8859-1',\n'en_us@euro@euro':'en_US.ISO8859-15',\n'en_za':'en_ZA.ISO8859-1',\n'en_zm':'en_ZM.UTF-8',\n'en_zw':'en_ZW.ISO8859-1',\n'en_zw.utf8':'en_ZS.UTF-8',\n'eng_gb':'en_GB.ISO8859-1',\n'english':'en_EN.ISO8859-1',\n'english_uk':'en_GB.ISO8859-1',\n'english_united-states':'en_US.ISO8859-1',\n'english_united-states.437':'C',\n'english_us':'en_US.ISO8859-1',\n'eo':'eo_XX.ISO8859-3',\n'eo.utf8':'eo.UTF-8',\n'eo_eo':'eo_EO.ISO8859-3',\n'eo_us.utf8':'eo_US.UTF-8',\n'eo_xx':'eo_XX.ISO8859-3',\n'es':'es_ES.ISO8859-1',\n'es_ar':'es_AR.ISO8859-1',\n'es_bo':'es_BO.ISO8859-1',\n'es_cl':'es_CL.ISO8859-1',\n'es_co':'es_CO.ISO8859-1',\n'es_cr':'es_CR.ISO8859-1',\n'es_cu':'es_CU.UTF-8',\n'es_do':'es_DO.ISO8859-1',\n'es_ec':'es_EC.ISO8859-1',\n'es_es':'es_ES.ISO8859-1',\n'es_gt':'es_GT.ISO8859-1',\n'es_hn':'es_HN.ISO8859-1',\n'es_mx':'es_MX.ISO8859-1',\n'es_ni':'es_NI.ISO8859-1',\n'es_pa':'es_PA.ISO8859-1',\n'es_pe':'es_PE.ISO8859-1',\n'es_pr':'es_PR.ISO8859-1',\n'es_py':'es_PY.ISO8859-1',\n'es_sv':'es_SV.ISO8859-1',\n'es_us':'es_US.ISO8859-1',\n'es_uy':'es_UY.ISO8859-1',\n'es_ve':'es_VE.ISO8859-1',\n'estonian':'et_EE.ISO8859-1',\n'et':'et_EE.ISO8859-15',\n'et_ee':'et_EE.ISO8859-15',\n'eu':'eu_ES.ISO8859-1',\n'eu_es':'eu_ES.ISO8859-1',\n'eu_fr':'eu_FR.ISO8859-1',\n'fa':'fa_IR.UTF-8',\n'fa_ir':'fa_IR.UTF-8',\n'fa_ir.isiri3342':'fa_IR.ISIRI-3342',\n'ff_sn':'ff_SN.UTF-8',\n'fi':'fi_FI.ISO8859-15',\n'fi_fi':'fi_FI.ISO8859-15',\n'fil_ph':'fil_PH.UTF-8',\n'finnish':'fi_FI.ISO8859-1',\n'fo':'fo_FO.ISO8859-1',\n'fo_fo':'fo_FO.ISO8859-1',\n'fr':'fr_FR.ISO8859-1',\n'fr_be':'fr_BE.ISO8859-1',\n'fr_ca':'fr_CA.ISO8859-1',\n'fr_ch':'fr_CH.ISO8859-1',\n'fr_fr':'fr_FR.ISO8859-1',\n'fr_lu':'fr_LU.ISO8859-1',\n'fran\\xe7ais':'fr_FR.ISO8859-1',\n'fre_fr':'fr_FR.ISO8859-1',\n'french':'fr_FR.ISO8859-1',\n'french.iso88591':'fr_CH.ISO8859-1',\n'french_france':'fr_FR.ISO8859-1',\n'fur_it':'fur_IT.UTF-8',\n'fy_de':'fy_DE.UTF-8',\n'fy_nl':'fy_NL.UTF-8',\n'ga':'ga_IE.ISO8859-1',\n'ga_ie':'ga_
IE.ISO8859-1',\n'galego':'gl_ES.ISO8859-1',\n'galician':'gl_ES.ISO8859-1',\n'gd':'gd_GB.ISO8859-1',\n'gd_gb':'gd_GB.ISO8859-1',\n'ger_de':'de_DE.ISO8859-1',\n'german':'de_DE.ISO8859-1',\n'german.iso88591':'de_CH.ISO8859-1',\n'german_germany':'de_DE.ISO8859-1',\n'gez_er':'gez_ER.UTF-8',\n'gez_et':'gez_ET.UTF-8',\n'gl':'gl_ES.ISO8859-1',\n'gl_es':'gl_ES.ISO8859-1',\n'greek':'el_GR.ISO8859-7',\n'gu_in':'gu_IN.UTF-8',\n'gv':'gv_GB.ISO8859-1',\n'gv_gb':'gv_GB.ISO8859-1',\n'ha_ng':'ha_NG.UTF-8',\n'he':'he_IL.ISO8859-8',\n'he_il':'he_IL.ISO8859-8',\n'hebrew':'he_IL.ISO8859-8',\n'hi':'hi_IN.ISCII-DEV',\n'hi_in':'hi_IN.ISCII-DEV',\n'hi_in.isciidev':'hi_IN.ISCII-DEV',\n'hne':'hne_IN.UTF-8',\n'hne_in':'hne_IN.UTF-8',\n'hr':'hr_HR.ISO8859-2',\n'hr_hr':'hr_HR.ISO8859-2',\n'hrvatski':'hr_HR.ISO8859-2',\n'hsb_de':'hsb_DE.ISO8859-2',\n'ht_ht':'ht_HT.UTF-8',\n'hu':'hu_HU.ISO8859-2',\n'hu_hu':'hu_HU.ISO8859-2',\n'hungarian':'hu_HU.ISO8859-2',\n'hy_am':'hy_AM.UTF-8',\n'hy_am.armscii8':'hy_AM.ARMSCII_8',\n'ia':'ia.UTF-8',\n'ia_fr':'ia_FR.UTF-8',\n'icelandic':'is_IS.ISO8859-1',\n'id':'id_ID.ISO8859-1',\n'id_id':'id_ID.ISO8859-1',\n'ig_ng':'ig_NG.UTF-8',\n'ik_ca':'ik_CA.UTF-8',\n'in':'id_ID.ISO8859-1',\n'in_id':'id_ID.ISO8859-1',\n'is':'is_IS.ISO8859-1',\n'is_is':'is_IS.ISO8859-1',\n'iso-8859-1':'en_US.ISO8859-1',\n'iso-8859-15':'en_US.ISO8859-15',\n'iso8859-1':'en_US.ISO8859-1',\n'iso8859-15':'en_US.ISO8859-15',\n'iso_8859_1':'en_US.ISO8859-1',\n'iso_8859_15':'en_US.ISO8859-15',\n'it':'it_IT.ISO8859-1',\n'it_ch':'it_CH.ISO8859-1',\n'it_it':'it_IT.ISO8859-1',\n'italian':'it_IT.ISO8859-1',\n'iu':'iu_CA.NUNACOM-8',\n'iu_ca':'iu_CA.NUNACOM-8',\n'iu_ca.nunacom8':'iu_CA.NUNACOM-8',\n'iw':'he_IL.ISO8859-8',\n'iw_il':'he_IL.ISO8859-8',\n'iw_il.utf8':'iw_IL.UTF-8',\n'ja':'ja_JP.eucJP',\n'ja_jp':'ja_JP.eucJP',\n'ja_jp.euc':'ja_JP.eucJP',\n'ja_jp.mscode':'ja_JP.SJIS',\n'ja_jp.pck':'ja_JP.SJIS',\n'japan':'ja_JP.eucJP',\n'japanese':'ja_JP.eucJP',\n'japanese-euc':'ja_JP.eucJP',\n'japanese.euc':'ja_JP.eucJP',\n'jp_jp':'ja_JP.eucJP',\n'ka':'ka_GE.GEORGIAN-ACADEMY',\n'ka_ge':'ka_GE.GEORGIAN-ACADEMY',\n'ka_ge.georgianacademy':'ka_GE.GEORGIAN-ACADEMY',\n'ka_ge.georgianps':'ka_GE.GEORGIAN-PS',\n'ka_ge.georgianrs':'ka_GE.GEORGIAN-ACADEMY',\n'kk_kz':'kk_KZ.RK1048',\n'kl':'kl_GL.ISO8859-1',\n'kl_gl':'kl_GL.ISO8859-1',\n'km_kh':'km_KH.UTF-8',\n'kn':'kn_IN.UTF-8',\n'kn_in':'kn_IN.UTF-8',\n'ko':'ko_KR.eucKR',\n'ko_kr':'ko_KR.eucKR',\n'ko_kr.euc':'ko_KR.eucKR',\n'kok_in':'kok_IN.UTF-8',\n'korean':'ko_KR.eucKR',\n'korean.euc':'ko_KR.eucKR',\n'ks':'ks_IN.UTF-8',\n'ks_in':'ks_IN.UTF-8',\n'ks_in@devanagari.utf8':'ks_IN.UTF-8@devanagari',\n'ku_tr':'ku_TR.ISO8859-9',\n'kw':'kw_GB.ISO8859-1',\n'kw_gb':'kw_GB.ISO8859-1',\n'ky':'ky_KG.UTF-8',\n'ky_kg':'ky_KG.UTF-8',\n'lb_lu':'lb_LU.UTF-8',\n'lg_ug':'lg_UG.ISO8859-10',\n'li_be':'li_BE.UTF-8',\n'li_nl':'li_NL.UTF-8',\n'lij_it':'lij_IT.UTF-8',\n'lithuanian':'lt_LT.ISO8859-13',\n'lo':'lo_LA.MULELAO-1',\n'lo_la':'lo_LA.MULELAO-1',\n'lo_la.cp1133':'lo_LA.IBM-CP1133',\n'lo_la.ibmcp1133':'lo_LA.IBM-CP1133',\n'lo_la.mulelao1':'lo_LA.MULELAO-1',\n'lt':'lt_LT.ISO8859-13',\n'lt_lt':'lt_LT.ISO8859-13',\n'lv':'lv_LV.ISO8859-13',\n'lv_lv':'lv_LV.ISO8859-13',\n'mag_in':'mag_IN.UTF-8',\n'mai':'mai_IN.UTF-8',\n'mai_in':'mai_IN.UTF-8',\n'mg_mg':'mg_MG.ISO8859-15',\n'mhr_ru':'mhr_RU.UTF-8',\n'mi':'mi_NZ.ISO8859-1',\n'mi_nz':'mi_NZ.ISO8859-1',\n'mk':'mk_MK.ISO8859-5',\n'mk_mk':'mk_MK.ISO8859-5',\n'ml':'ml_IN.UTF-8',\n'ml_in':'ml_IN.UTF-8',\n'mn_mn':'mn_MN.UTF-8',\n'mni_in':'mni_IN.UTF-8',\n'mr':'mr_IN.UTF-8',\n'mr_in
':'mr_IN.UTF-8',\n'ms':'ms_MY.ISO8859-1',\n'ms_my':'ms_MY.ISO8859-1',\n'mt':'mt_MT.ISO8859-3',\n'mt_mt':'mt_MT.ISO8859-3',\n'my_mm':'my_MM.UTF-8',\n'nan_tw@latin':'nan_TW.UTF-8@latin',\n'nb':'nb_NO.ISO8859-1',\n'nb_no':'nb_NO.ISO8859-1',\n'nds_de':'nds_DE.UTF-8',\n'nds_nl':'nds_NL.UTF-8',\n'ne_np':'ne_NP.UTF-8',\n'nhn_mx':'nhn_MX.UTF-8',\n'niu_nu':'niu_NU.UTF-8',\n'niu_nz':'niu_NZ.UTF-8',\n'nl':'nl_NL.ISO8859-1',\n'nl_aw':'nl_AW.UTF-8',\n'nl_be':'nl_BE.ISO8859-1',\n'nl_nl':'nl_NL.ISO8859-1',\n'nn':'nn_NO.ISO8859-1',\n'nn_no':'nn_NO.ISO8859-1',\n'no':'no_NO.ISO8859-1',\n'no@nynorsk':'ny_NO.ISO8859-1',\n'no_no':'no_NO.ISO8859-1',\n'no_no.iso88591@bokmal':'no_NO.ISO8859-1',\n'no_no.iso88591@nynorsk':'no_NO.ISO8859-1',\n'norwegian':'no_NO.ISO8859-1',\n'nr':'nr_ZA.ISO8859-1',\n'nr_za':'nr_ZA.ISO8859-1',\n'nso':'nso_ZA.ISO8859-15',\n'nso_za':'nso_ZA.ISO8859-15',\n'ny':'ny_NO.ISO8859-1',\n'ny_no':'ny_NO.ISO8859-1',\n'nynorsk':'nn_NO.ISO8859-1',\n'oc':'oc_FR.ISO8859-1',\n'oc_fr':'oc_FR.ISO8859-1',\n'om_et':'om_ET.UTF-8',\n'om_ke':'om_KE.ISO8859-1',\n'or':'or_IN.UTF-8',\n'or_in':'or_IN.UTF-8',\n'os_ru':'os_RU.UTF-8',\n'pa':'pa_IN.UTF-8',\n'pa_in':'pa_IN.UTF-8',\n'pa_pk':'pa_PK.UTF-8',\n'pap_an':'pap_AN.UTF-8',\n'pd':'pd_US.ISO8859-1',\n'pd_de':'pd_DE.ISO8859-1',\n'pd_us':'pd_US.ISO8859-1',\n'ph':'ph_PH.ISO8859-1',\n'ph_ph':'ph_PH.ISO8859-1',\n'pl':'pl_PL.ISO8859-2',\n'pl_pl':'pl_PL.ISO8859-2',\n'polish':'pl_PL.ISO8859-2',\n'portuguese':'pt_PT.ISO8859-1',\n'portuguese_brazil':'pt_BR.ISO8859-1',\n'posix':'C',\n'posix-utf2':'C',\n'pp':'pp_AN.ISO8859-1',\n'pp_an':'pp_AN.ISO8859-1',\n'ps_af':'ps_AF.UTF-8',\n'pt':'pt_PT.ISO8859-1',\n'pt_br':'pt_BR.ISO8859-1',\n'pt_pt':'pt_PT.ISO8859-1',\n'ro':'ro_RO.ISO8859-2',\n'ro_ro':'ro_RO.ISO8859-2',\n'romanian':'ro_RO.ISO8859-2',\n'ru':'ru_RU.UTF-8',\n'ru_ru':'ru_RU.UTF-8',\n'ru_ua':'ru_UA.KOI8-U',\n'rumanian':'ro_RO.ISO8859-2',\n'russian':'ru_RU.ISO8859-5',\n'rw':'rw_RW.ISO8859-1',\n'rw_rw':'rw_RW.ISO8859-1',\n'sa_in':'sa_IN.UTF-8',\n'sat_in':'sat_IN.UTF-8',\n'sc_it':'sc_IT.UTF-8',\n'sd':'sd_IN.UTF-8',\n'sd_in':'sd_IN.UTF-8',\n'sd_in@devanagari.utf8':'sd_IN.UTF-8@devanagari',\n'sd_pk':'sd_PK.UTF-8',\n'se_no':'se_NO.UTF-8',\n'serbocroatian':'sr_RS.UTF-8@latin',\n'sh':'sr_RS.UTF-8@latin',\n'sh_ba.iso88592@bosnia':'sr_CS.ISO8859-2',\n'sh_hr':'sh_HR.ISO8859-2',\n'sh_hr.iso88592':'hr_HR.ISO8859-2',\n'sh_sp':'sr_CS.ISO8859-2',\n'sh_yu':'sr_RS.UTF-8@latin',\n'shs_ca':'shs_CA.UTF-8',\n'si':'si_LK.UTF-8',\n'si_lk':'si_LK.UTF-8',\n'sid_et':'sid_ET.UTF-8',\n'sinhala':'si_LK.UTF-8',\n'sk':'sk_SK.ISO8859-2',\n'sk_sk':'sk_SK.ISO8859-2',\n'sl':'sl_SI.ISO8859-2',\n'sl_cs':'sl_CS.ISO8859-2',\n'sl_si':'sl_SI.ISO8859-2',\n'slovak':'sk_SK.ISO8859-2',\n'slovene':'sl_SI.ISO8859-2',\n'slovenian':'sl_SI.ISO8859-2',\n'so_dj':'so_DJ.ISO8859-1',\n'so_et':'so_ET.UTF-8',\n'so_ke':'so_KE.ISO8859-1',\n'so_so':'so_SO.ISO8859-1',\n'sp':'sr_CS.ISO8859-5',\n'sp_yu':'sr_CS.ISO8859-5',\n'spanish':'es_ES.ISO8859-1',\n'spanish_spain':'es_ES.ISO8859-1',\n'sq':'sq_AL.ISO8859-2',\n'sq_al':'sq_AL.ISO8859-2',\n'sq_mk':'sq_MK.UTF-8',\n'sr':'sr_RS.UTF-8',\n'sr@cyrillic':'sr_RS.UTF-8',\n'sr@latn':'sr_CS.UTF-8@latin',\n'sr_cs':'sr_CS.UTF-8',\n'sr_cs.iso88592@latn':'sr_CS.ISO8859-2',\n'sr_cs@latn':'sr_CS.UTF-8@latin',\n'sr_me':'sr_ME.UTF-8',\n'sr_rs':'sr_RS.UTF-8',\n'sr_rs@latn':'sr_RS.UTF-8@latin',\n'sr_sp':'sr_CS.ISO8859-2',\n'sr_yu':'sr_RS.UTF-8@latin',\n'sr_yu.cp1251@cyrillic':'sr_CS.CP1251',\n'sr_yu.iso88592':'sr_CS.ISO8859-2',\n'sr_yu.iso88595':'sr_CS.ISO8859-5',\n'sr_yu.iso88595@cyrillic':'sr_CS.ISO8859-5'
,\n'sr_yu.microsoftcp1251@cyrillic':'sr_CS.CP1251',\n'sr_yu.utf8':'sr_RS.UTF-8',\n'sr_yu.utf8@cyrillic':'sr_RS.UTF-8',\n'sr_yu@cyrillic':'sr_RS.UTF-8',\n'ss':'ss_ZA.ISO8859-1',\n'ss_za':'ss_ZA.ISO8859-1',\n'st':'st_ZA.ISO8859-1',\n'st_za':'st_ZA.ISO8859-1',\n'sv':'sv_SE.ISO8859-1',\n'sv_fi':'sv_FI.ISO8859-1',\n'sv_se':'sv_SE.ISO8859-1',\n'sw_ke':'sw_KE.UTF-8',\n'sw_tz':'sw_TZ.UTF-8',\n'swedish':'sv_SE.ISO8859-1',\n'szl_pl':'szl_PL.UTF-8',\n'ta':'ta_IN.TSCII-0',\n'ta_in':'ta_IN.TSCII-0',\n'ta_in.tscii':'ta_IN.TSCII-0',\n'ta_in.tscii0':'ta_IN.TSCII-0',\n'ta_lk':'ta_LK.UTF-8',\n'te':'te_IN.UTF-8',\n'te_in':'te_IN.UTF-8',\n'tg':'tg_TJ.KOI8-C',\n'tg_tj':'tg_TJ.KOI8-C',\n'th':'th_TH.ISO8859-11',\n'th_th':'th_TH.ISO8859-11',\n'th_th.tactis':'th_TH.TIS620',\n'th_th.tis620':'th_TH.TIS620',\n'thai':'th_TH.ISO8859-11',\n'ti_er':'ti_ER.UTF-8',\n'ti_et':'ti_ET.UTF-8',\n'tig_er':'tig_ER.UTF-8',\n'tk_tm':'tk_TM.UTF-8',\n'tl':'tl_PH.ISO8859-1',\n'tl_ph':'tl_PH.ISO8859-1',\n'tn':'tn_ZA.ISO8859-15',\n'tn_za':'tn_ZA.ISO8859-15',\n'tr':'tr_TR.ISO8859-9',\n'tr_cy':'tr_CY.ISO8859-9',\n'tr_tr':'tr_TR.ISO8859-9',\n'ts':'ts_ZA.ISO8859-1',\n'ts_za':'ts_ZA.ISO8859-1',\n'tt':'tt_RU.TATAR-CYR',\n'tt_ru':'tt_RU.TATAR-CYR',\n'tt_ru.tatarcyr':'tt_RU.TATAR-CYR',\n'tt_ru@iqtelif':'tt_RU.UTF-8@iqtelif',\n'turkish':'tr_TR.ISO8859-9',\n'ug_cn':'ug_CN.UTF-8',\n'uk':'uk_UA.KOI8-U',\n'uk_ua':'uk_UA.KOI8-U',\n'univ':'en_US.utf',\n'universal':'en_US.utf',\n'universal.utf8@ucs4':'en_US.UTF-8',\n'unm_us':'unm_US.UTF-8',\n'ur':'ur_PK.CP1256',\n'ur_in':'ur_IN.UTF-8',\n'ur_pk':'ur_PK.CP1256',\n'uz':'uz_UZ.UTF-8',\n'uz_uz':'uz_UZ.UTF-8',\n'uz_uz@cyrillic':'uz_UZ.UTF-8',\n've':'ve_ZA.UTF-8',\n've_za':'ve_ZA.UTF-8',\n'vi':'vi_VN.TCVN',\n'vi_vn':'vi_VN.TCVN',\n'vi_vn.tcvn':'vi_VN.TCVN',\n'vi_vn.tcvn5712':'vi_VN.TCVN',\n'vi_vn.viscii':'vi_VN.VISCII',\n'vi_vn.viscii111':'vi_VN.VISCII',\n'wa':'wa_BE.ISO8859-1',\n'wa_be':'wa_BE.ISO8859-1',\n'wae_ch':'wae_CH.UTF-8',\n'wal_et':'wal_ET.UTF-8',\n'wo_sn':'wo_SN.UTF-8',\n'xh':'xh_ZA.ISO8859-1',\n'xh_za':'xh_ZA.ISO8859-1',\n'yi':'yi_US.CP1255',\n'yi_us':'yi_US.CP1255',\n'yo_ng':'yo_NG.UTF-8',\n'yue_hk':'yue_HK.UTF-8',\n'zh':'zh_CN.eucCN',\n'zh_cn':'zh_CN.gb2312',\n'zh_cn.big5':'zh_TW.big5',\n'zh_cn.euc':'zh_CN.eucCN',\n'zh_hk':'zh_HK.big5hkscs',\n'zh_hk.big5hk':'zh_HK.big5hkscs',\n'zh_sg':'zh_SG.GB2312',\n'zh_sg.gbk':'zh_SG.GBK',\n'zh_tw':'zh_TW.big5',\n'zh_tw.euc':'zh_TW.eucTW',\n'zh_tw.euctw':'zh_TW.eucTW',\n'zu':'zu_ZA.ISO8859-1',\n'zu_za':'zu_ZA.ISO8859-1',\n}\n\n\n\n\n\n\n\n\n\n\n\n\n\n\nwindows_locale={\n0x0436:\"af_ZA\",\n0x041c:\"sq_AL\",\n0x0484:\"gsw_FR\",\n0x045e:\"am_ET\",\n0x0401:\"ar_SA\",\n0x0801:\"ar_IQ\",\n0x0c01:\"ar_EG\",\n0x1001:\"ar_LY\",\n0x1401:\"ar_DZ\",\n0x1801:\"ar_MA\",\n0x1c01:\"ar_TN\",\n0x2001:\"ar_OM\",\n0x2401:\"ar_YE\",\n0x2801:\"ar_SY\",\n0x2c01:\"ar_JO\",\n0x3001:\"ar_LB\",\n0x3401:\"ar_KW\",\n0x3801:\"ar_AE\",\n0x3c01:\"ar_BH\",\n0x4001:\"ar_QA\",\n0x042b:\"hy_AM\",\n0x044d:\"as_IN\",\n0x042c:\"az_AZ\",\n0x082c:\"az_AZ\",\n0x046d:\"ba_RU\",\n0x042d:\"eu_ES\",\n0x0423:\"be_BY\",\n0x0445:\"bn_IN\",\n0x201a:\"bs_BA\",\n0x141a:\"bs_BA\",\n0x047e:\"br_FR\",\n0x0402:\"bg_BG\",\n\n0x0403:\"ca_ES\",\n0x0004:\"zh_CHS\",\n0x0404:\"zh_TW\",\n0x0804:\"zh_CN\",\n0x0c04:\"zh_HK\",\n0x1004:\"zh_SG\",\n0x1404:\"zh_MO\",\n0x7c04:\"zh_CHT\",\n0x0483:\"co_FR\",\n0x041a:\"hr_HR\",\n0x101a:\"hr_BA\",\n0x0405:\"cs_CZ\",\n0x0406:\"da_DK\",\n0x048c:\"gbz_AF\",\n0x0465:\"div_MV\",\n0x0413:\"nl_NL\",\n0x0813:\"nl_BE\",\n0x0409:\"en_US\",\n0x0809:\"en_GB\",\n0x0c09:\"en_AU\",\n0x1009:\"en_CA\
",\n0x1409:\"en_NZ\",\n0x1809:\"en_IE\",\n0x1c09:\"en_ZA\",\n0x2009:\"en_JA\",\n0x2409:\"en_CB\",\n0x2809:\"en_BZ\",\n0x2c09:\"en_TT\",\n0x3009:\"en_ZW\",\n0x3409:\"en_PH\",\n0x4009:\"en_IN\",\n0x4409:\"en_MY\",\n0x4809:\"en_IN\",\n0x0425:\"et_EE\",\n0x0438:\"fo_FO\",\n0x0464:\"fil_PH\",\n0x040b:\"fi_FI\",\n0x040c:\"fr_FR\",\n0x080c:\"fr_BE\",\n0x0c0c:\"fr_CA\",\n0x100c:\"fr_CH\",\n0x140c:\"fr_LU\",\n0x180c:\"fr_MC\",\n0x0462:\"fy_NL\",\n0x0456:\"gl_ES\",\n0x0437:\"ka_GE\",\n0x0407:\"de_DE\",\n0x0807:\"de_CH\",\n0x0c07:\"de_AT\",\n0x1007:\"de_LU\",\n0x1407:\"de_LI\",\n0x0408:\"el_GR\",\n0x046f:\"kl_GL\",\n0x0447:\"gu_IN\",\n0x0468:\"ha_NG\",\n0x040d:\"he_IL\",\n0x0439:\"hi_IN\",\n0x040e:\"hu_HU\",\n0x040f:\"is_IS\",\n0x0421:\"id_ID\",\n0x045d:\"iu_CA\",\n0x085d:\"iu_CA\",\n0x083c:\"ga_IE\",\n0x0410:\"it_IT\",\n0x0810:\"it_CH\",\n0x0411:\"ja_JP\",\n0x044b:\"kn_IN\",\n0x043f:\"kk_KZ\",\n0x0453:\"kh_KH\",\n0x0486:\"qut_GT\",\n0x0487:\"rw_RW\",\n0x0457:\"kok_IN\",\n0x0412:\"ko_KR\",\n0x0440:\"ky_KG\",\n0x0454:\"lo_LA\",\n0x0426:\"lv_LV\",\n0x0427:\"lt_LT\",\n0x082e:\"dsb_DE\",\n0x046e:\"lb_LU\",\n0x042f:\"mk_MK\",\n0x043e:\"ms_MY\",\n0x083e:\"ms_BN\",\n0x044c:\"ml_IN\",\n0x043a:\"mt_MT\",\n0x0481:\"mi_NZ\",\n0x047a:\"arn_CL\",\n0x044e:\"mr_IN\",\n0x047c:\"moh_CA\",\n0x0450:\"mn_MN\",\n0x0850:\"mn_CN\",\n0x0461:\"ne_NP\",\n0x0414:\"nb_NO\",\n0x0814:\"nn_NO\",\n0x0482:\"oc_FR\",\n0x0448:\"or_IN\",\n0x0463:\"ps_AF\",\n0x0429:\"fa_IR\",\n0x0415:\"pl_PL\",\n0x0416:\"pt_BR\",\n0x0816:\"pt_PT\",\n0x0446:\"pa_IN\",\n0x046b:\"quz_BO\",\n0x086b:\"quz_EC\",\n0x0c6b:\"quz_PE\",\n0x0418:\"ro_RO\",\n0x0417:\"rm_CH\",\n0x0419:\"ru_RU\",\n0x243b:\"smn_FI\",\n0x103b:\"smj_NO\",\n0x143b:\"smj_SE\",\n0x043b:\"se_NO\",\n0x083b:\"se_SE\",\n0x0c3b:\"se_FI\",\n0x203b:\"sms_FI\",\n0x183b:\"sma_NO\",\n0x1c3b:\"sma_SE\",\n0x044f:\"sa_IN\",\n0x0c1a:\"sr_SP\",\n0x1c1a:\"sr_BA\",\n0x081a:\"sr_SP\",\n0x181a:\"sr_BA\",\n0x045b:\"si_LK\",\n0x046c:\"ns_ZA\",\n0x0432:\"tn_ZA\",\n0x041b:\"sk_SK\",\n0x0424:\"sl_SI\",\n0x040a:\"es_ES\",\n0x080a:\"es_MX\",\n0x0c0a:\"es_ES\",\n0x100a:\"es_GT\",\n0x140a:\"es_CR\",\n0x180a:\"es_PA\",\n0x1c0a:\"es_DO\",\n0x200a:\"es_VE\",\n0x240a:\"es_CO\",\n0x280a:\"es_PE\",\n0x2c0a:\"es_AR\",\n0x300a:\"es_EC\",\n0x340a:\"es_CL\",\n0x380a:\"es_UR\",\n0x3c0a:\"es_PY\",\n0x400a:\"es_BO\",\n0x440a:\"es_SV\",\n0x480a:\"es_HN\",\n0x4c0a:\"es_NI\",\n0x500a:\"es_PR\",\n0x540a:\"es_US\",\n\n0x0441:\"sw_KE\",\n0x041d:\"sv_SE\",\n0x081d:\"sv_FI\",\n0x045a:\"syr_SY\",\n0x0428:\"tg_TJ\",\n0x085f:\"tmz_DZ\",\n0x0449:\"ta_IN\",\n0x0444:\"tt_RU\",\n0x044a:\"te_IN\",\n0x041e:\"th_TH\",\n0x0851:\"bo_BT\",\n0x0451:\"bo_CN\",\n0x041f:\"tr_TR\",\n0x0442:\"tk_TM\",\n0x0480:\"ug_CN\",\n0x0422:\"uk_UA\",\n0x042e:\"wen_DE\",\n0x0420:\"ur_PK\",\n0x0820:\"ur_IN\",\n0x0443:\"uz_UZ\",\n0x0843:\"uz_UZ\",\n0x042a:\"vi_VN\",\n0x0452:\"cy_GB\",\n0x0488:\"wo_SN\",\n0x0434:\"xh_ZA\",\n0x0485:\"sah_RU\",\n0x0478:\"ii_CN\",\n0x046a:\"yo_NG\",\n0x0435:\"zu_ZA\",\n}\n\ndef _print_locale():\n\n ''\n \n categories={}\n def _init_categories(categories=categories):\n for k,v in globals().items():\n if k[:3]=='LC_':\n categories[k]=v\n _init_categories()\n del categories['LC_ALL']\n \n print('Locale defaults as determined by getdefaultlocale():')\n print('-'*72)\n lang,enc=getdefaultlocale()\n print('Language: ',lang or'(undefined)')\n print('Encoding: ',enc or'(undefined)')\n print()\n \n print('Locale settings on startup:')\n print('-'*72)\n for name,category in categories.items():\n print(name,'...')\n lang,enc=getlocale(category)\n 
print(' Language: ',lang or'(undefined)')\n print(' Encoding: ',enc or'(undefined)')\n print()\n \n print()\n print('Locale settings after calling resetlocale():')\n print('-'*72)\n resetlocale()\n for name,category in categories.items():\n print(name,'...')\n lang,enc=getlocale(category)\n print(' Language: ',lang or'(undefined)')\n print(' Encoding: ',enc or'(undefined)')\n print()\n \n try :\n setlocale(LC_ALL,\"\")\n except :\n print('NOTE:')\n print('setlocale(LC_ALL, \"\") does not support the default locale')\n print('given in the OS environment variables.')\n else :\n print()\n print('Locale settings after calling setlocale(LC_ALL, \"\"):')\n print('-'*72)\n for name,category in categories.items():\n print(name,'...')\n lang,enc=getlocale(category)\n print(' Language: ',lang or'(undefined)')\n print(' Encoding: ',enc or'(undefined)')\n print()\n \n \n \ntry :\n LC_MESSAGES\nexcept NameError:\n pass\nelse :\n __all__.append(\"LC_MESSAGES\")\n \nif __name__ =='__main__':\n print('Locale aliasing:')\n print()\n _print_locale()\n print()\n print('Number formatting:')\n print()\n _test()\n"], "asyncio.proactor_events": [".py", "''\n\n\n\n\n\n__all__=['BaseProactorEventLoop']\n\nimport socket\nimport sys\nimport warnings\n\nfrom .import base_events\nfrom .import constants\nfrom .import futures\nfrom .import sslproto\nfrom .import transports\nfrom .log import logger\n\n\nclass _ProactorBasePipeTransport(transports._FlowControlMixin,\ntransports.BaseTransport):\n ''\n \n def __init__(self,loop,sock,protocol,waiter=None ,\n extra=None ,server=None ):\n super().__init__(extra,loop)\n self._set_extra(sock)\n self._sock=sock\n self._protocol=protocol\n self._server=server\n self._buffer=None\n self._read_fut=None\n self._write_fut=None\n self._pending_write=0\n self._conn_lost=0\n self._closing=False\n self._eof_written=False\n if self._server is not None :\n self._server._attach()\n self._loop.call_soon(self._protocol.connection_made,self)\n if waiter is not None :\n \n self._loop.call_soon(waiter._set_result_unless_cancelled,None )\n \n def __repr__(self):\n info=[self.__class__.__name__]\n if self._sock is None :\n info.append('closed')\n elif self._closing:\n info.append('closing')\n if self._sock is not None :\n info.append('fd=%s'%self._sock.fileno())\n if self._read_fut is not None :\n info.append('read=%s'%self._read_fut)\n if self._write_fut is not None :\n info.append(\"write=%r\"%self._write_fut)\n if self._buffer:\n bufsize=len(self._buffer)\n info.append('write_bufsize=%s'%bufsize)\n if self._eof_written:\n info.append('EOF written')\n return'<%s>'%' '.join(info)\n \n def _set_extra(self,sock):\n self._extra['pipe']=sock\n \n def close(self):\n if self._closing:\n return\n self._closing=True\n self._conn_lost +=1\n if not self._buffer and self._write_fut is None :\n self._loop.call_soon(self._call_connection_lost,None )\n if self._read_fut is not None :\n self._read_fut.cancel()\n self._read_fut=None\n \n \n \n \n if sys.version_info >=(3,4):\n def __del__(self):\n if self._sock is not None :\n warnings.warn(\"unclosed transport %r\"%self,ResourceWarning)\n self.close()\n \n def _fatal_error(self,exc,message='Fatal error on pipe transport'):\n if isinstance(exc,(BrokenPipeError,ConnectionResetError)):\n if self._loop.get_debug():\n logger.debug(\"%r: %s\",self,message,exc_info=True )\n else :\n self._loop.call_exception_handler({\n 'message':message,\n 'exception':exc,\n 'transport':self,\n 'protocol':self._protocol,\n })\n self._force_close(exc)\n \n def _force_close(self,exc):\n 
if self._closing:\n return\n self._closing=True\n self._conn_lost +=1\n if self._write_fut:\n self._write_fut.cancel()\n self._write_fut=None\n if self._read_fut:\n self._read_fut.cancel()\n self._read_fut=None\n self._pending_write=0\n self._buffer=None\n self._loop.call_soon(self._call_connection_lost,exc)\n \n def _call_connection_lost(self,exc):\n try :\n self._protocol.connection_lost(exc)\n finally :\n \n \n \n \n if hasattr(self._sock,'shutdown'):\n self._sock.shutdown(socket.SHUT_RDWR)\n self._sock.close()\n self._sock=None\n server=self._server\n if server is not None :\n server._detach()\n self._server=None\n \n def get_write_buffer_size(self):\n size=self._pending_write\n if self._buffer is not None :\n size +=len(self._buffer)\n return size\n \n \nclass _ProactorReadPipeTransport(_ProactorBasePipeTransport,\ntransports.ReadTransport):\n ''\n \n def __init__(self,loop,sock,protocol,waiter=None ,\n extra=None ,server=None ):\n super().__init__(loop,sock,protocol,waiter,extra,server)\n self._paused=False\n self._loop.call_soon(self._loop_reading)\n \n def pause_reading(self):\n if self._closing:\n raise RuntimeError('Cannot pause_reading() when closing')\n if self._paused:\n raise RuntimeError('Already paused')\n self._paused=True\n if self._loop.get_debug():\n logger.debug(\"%r pauses reading\",self)\n \n def resume_reading(self):\n if not self._paused:\n raise RuntimeError('Not paused')\n self._paused=False\n if self._closing:\n return\n self._loop.call_soon(self._loop_reading,self._read_fut)\n if self._loop.get_debug():\n logger.debug(\"%r resumes reading\",self)\n \n def _loop_reading(self,fut=None ):\n if self._paused:\n return\n data=None\n \n try :\n if fut is not None :\n assert self._read_fut is fut or (self._read_fut is None and\n self._closing)\n self._read_fut=None\n data=fut.result()\n \n if self._closing:\n \n data=None\n return\n \n if data ==b'':\n \n return\n \n \n self._read_fut=self._loop._proactor.recv(self._sock,4096)\n except ConnectionAbortedError as exc:\n if not self._closing:\n self._fatal_error(exc,'Fatal read error on pipe transport')\n elif self._loop.get_debug():\n logger.debug(\"Read error on pipe transport while closing\",\n exc_info=True )\n except ConnectionResetError as exc:\n self._force_close(exc)\n except OSError as exc:\n self._fatal_error(exc,'Fatal read error on pipe transport')\n except futures.CancelledError:\n if not self._closing:\n raise\n else :\n self._read_fut.add_done_callback(self._loop_reading)\n finally :\n if data:\n self._protocol.data_received(data)\n elif data is not None :\n if self._loop.get_debug():\n logger.debug(\"%r received EOF\",self)\n keep_open=self._protocol.eof_received()\n if not keep_open:\n self.close()\n \n \nclass _ProactorBaseWritePipeTransport(_ProactorBasePipeTransport,\ntransports.WriteTransport):\n ''\n \n def write(self,data):\n if not isinstance(data,(bytes,bytearray,memoryview)):\n raise TypeError('data argument must be byte-ish (%r)',\n type(data))\n if self._eof_written:\n raise RuntimeError('write_eof() already called')\n \n if not data:\n return\n \n if self._conn_lost:\n if self._conn_lost >=constants.LOG_THRESHOLD_FOR_CONNLOST_WRITES:\n logger.warning('socket.send() raised exception.')\n self._conn_lost +=1\n return\n \n \n \n \n \n \n \n if self._write_fut is None :\n assert self._buffer is None\n \n self._loop_writing(data=bytes(data))\n elif not self._buffer:\n \n self._buffer=bytearray(data)\n self._maybe_pause_protocol()\n else :\n \n self._buffer.extend(data)\n 
self._maybe_pause_protocol()\n \n def _loop_writing(self,f=None ,data=None ):\n try :\n assert f is self._write_fut\n self._write_fut=None\n self._pending_write=0\n if f:\n f.result()\n if data is None :\n data=self._buffer\n self._buffer=None\n if not data:\n if self._closing:\n self._loop.call_soon(self._call_connection_lost,None )\n if self._eof_written:\n self._sock.shutdown(socket.SHUT_WR)\n \n \n \n \n \n self._maybe_resume_protocol()\n else :\n self._write_fut=self._loop._proactor.send(self._sock,data)\n if not self._write_fut.done():\n assert self._pending_write ==0\n self._pending_write=len(data)\n self._write_fut.add_done_callback(self._loop_writing)\n self._maybe_pause_protocol()\n else :\n self._write_fut.add_done_callback(self._loop_writing)\n except ConnectionResetError as exc:\n self._force_close(exc)\n except OSError as exc:\n self._fatal_error(exc,'Fatal write error on pipe transport')\n \n def can_write_eof(self):\n return True\n \n def write_eof(self):\n self.close()\n \n def abort(self):\n self._force_close(None )\n \n \nclass _ProactorWritePipeTransport(_ProactorBaseWritePipeTransport):\n def __init__(self,*args,**kw):\n super().__init__(*args,**kw)\n self._read_fut=self._loop._proactor.recv(self._sock,16)\n self._read_fut.add_done_callback(self._pipe_closed)\n \n def _pipe_closed(self,fut):\n if fut.cancelled():\n \n return\n assert fut.result()==b''\n if self._closing:\n assert self._read_fut is None\n return\n assert fut is self._read_fut,(fut,self._read_fut)\n self._read_fut=None\n if self._write_fut is not None :\n self._force_close(BrokenPipeError())\n else :\n self.close()\n \n \nclass _ProactorDuplexPipeTransport(_ProactorReadPipeTransport,\n_ProactorBaseWritePipeTransport,\ntransports.Transport):\n ''\n \n def can_write_eof(self):\n return False\n \n def write_eof(self):\n raise NotImplementedError\n \n \nclass _ProactorSocketTransport(_ProactorReadPipeTransport,\n_ProactorBaseWritePipeTransport,\ntransports.Transport):\n ''\n \n def _set_extra(self,sock):\n self._extra['socket']=sock\n try :\n self._extra['sockname']=sock.getsockname()\n except (socket.error,AttributeError):\n if self._loop.get_debug():\n logger.warning(\"getsockname() failed on %r\",\n sock,exc_info=True )\n if'peername'not in self._extra:\n try :\n self._extra['peername']=sock.getpeername()\n except (socket.error,AttributeError):\n if self._loop.get_debug():\n logger.warning(\"getpeername() failed on %r\",\n sock,exc_info=True )\n \n def can_write_eof(self):\n return True\n \n def write_eof(self):\n if self._closing or self._eof_written:\n return\n self._eof_written=True\n if self._write_fut is None :\n self._sock.shutdown(socket.SHUT_WR)\n \n \nclass BaseProactorEventLoop(base_events.BaseEventLoop):\n\n def __init__(self,proactor):\n super().__init__()\n logger.debug('Using proactor: %s',proactor.__class__.__name__)\n self._proactor=proactor\n self._selector=proactor\n self._self_reading_future=None\n self._accept_futures={}\n proactor.set_loop(self)\n self._make_self_pipe()\n \n def _make_socket_transport(self,sock,protocol,waiter=None ,\n extra=None ,server=None ):\n return _ProactorSocketTransport(self,sock,protocol,waiter,\n extra,server)\n \n def _make_ssl_transport(self,rawsock,protocol,sslcontext,waiter=None ,\n *,server_side=False ,server_hostname=None ,\n extra=None ,server=None ):\n if not sslproto._is_sslproto_available():\n raise NotImplementedError(\"Proactor event loop requires Python 3.5\"\n \" or newer (ssl.MemoryBIO) to support \"\n \"SSL\")\n \n 
ssl_protocol=sslproto.SSLProtocol(self,protocol,sslcontext,waiter,\n server_side,server_hostname)\n _ProactorSocketTransport(self,rawsock,ssl_protocol,\n extra=extra,server=server)\n return ssl_protocol._app_transport\n \n def _make_duplex_pipe_transport(self,sock,protocol,waiter=None ,\n extra=None ):\n return _ProactorDuplexPipeTransport(self,\n sock,protocol,waiter,extra)\n \n def _make_read_pipe_transport(self,sock,protocol,waiter=None ,\n extra=None ):\n return _ProactorReadPipeTransport(self,sock,protocol,waiter,extra)\n \n def _make_write_pipe_transport(self,sock,protocol,waiter=None ,\n extra=None ):\n \n return _ProactorWritePipeTransport(self,\n sock,protocol,waiter,extra)\n \n def close(self):\n if self.is_running():\n raise RuntimeError(\"Cannot close a running event loop\")\n if self.is_closed():\n return\n \n \n \n \n self._stop_accept_futures()\n self._close_self_pipe()\n self._proactor.close()\n self._proactor=None\n self._selector=None\n \n \n super().close()\n \n def sock_recv(self,sock,n):\n return self._proactor.recv(sock,n)\n \n def sock_sendall(self,sock,data):\n return self._proactor.send(sock,data)\n \n def sock_connect(self,sock,address):\n try :\n if self._debug:\n base_events._check_resolved_address(sock,address)\n except ValueError as err:\n fut=futures.Future(loop=self)\n fut.set_exception(err)\n return fut\n else :\n return self._proactor.connect(sock,address)\n \n def sock_accept(self,sock):\n return self._proactor.accept(sock)\n \n def _socketpair(self):\n raise NotImplementedError\n \n def _close_self_pipe(self):\n if self._self_reading_future is not None :\n self._self_reading_future.cancel()\n self._self_reading_future=None\n self._ssock.close()\n self._ssock=None\n self._csock.close()\n self._csock=None\n self._internal_fds -=1\n \n def _make_self_pipe(self):\n \n self._ssock,self._csock=self._socketpair()\n self._ssock.setblocking(False )\n self._csock.setblocking(False )\n self._internal_fds +=1\n self.call_soon(self._loop_self_reading)\n \n def _loop_self_reading(self,f=None ):\n try :\n if f is not None :\n f.result()\n f=self._proactor.recv(self._ssock,4096)\n except futures.CancelledError:\n \n return\n except Exception as exc:\n self.call_exception_handler({\n 'message':'Error on reading from the event loop self pipe',\n 'exception':exc,\n 'loop':self,\n })\n else :\n self._self_reading_future=f\n f.add_done_callback(self._loop_self_reading)\n \n def _write_to_self(self):\n self._csock.send(b'\\0')\n \n def _start_serving(self,protocol_factory,sock,\n sslcontext=None ,server=None ):\n \n def loop(f=None ):\n try :\n if f is not None :\n conn,addr=f.result()\n if self._debug:\n logger.debug(\"%r got a new connection from %r: %r\",\n server,addr,conn)\n protocol=protocol_factory()\n if sslcontext is not None :\n self._make_ssl_transport(\n conn,protocol,sslcontext,server_side=True ,\n extra={'peername':addr},server=server)\n else :\n self._make_socket_transport(\n conn,protocol,\n extra={'peername':addr},server=server)\n if self.is_closed():\n return\n f=self._proactor.accept(sock)\n except OSError as exc:\n if sock.fileno()!=-1:\n self.call_exception_handler({\n 'message':'Accept failed on a socket',\n 'exception':exc,\n 'socket':sock,\n })\n sock.close()\n elif self._debug:\n logger.debug(\"Accept failed on socket %r\",\n sock,exc_info=True )\n except futures.CancelledError:\n sock.close()\n else :\n self._accept_futures[sock.fileno()]=f\n f.add_done_callback(loop)\n \n self.call_soon(loop)\n \n def _process_events(self,event_list):\n \n pass\n \n 
def _stop_accept_futures(self):\n for future in self._accept_futures.values():\n future.cancel()\n self._accept_futures.clear()\n \n def _stop_serving(self,sock):\n self._stop_accept_futures()\n self._proactor._stop_serving(sock)\n sock.close()\n"], "_ajax": [".js", "// ajax\nvar $module = (function($B){\n\neval($B.InjectBuiltins())\nvar $N = $B.builtins.None\n\n\nfunction ajax(){\n\n if (window.XMLHttpRequest){// code for IE7+, Firefox, Chrome, Opera, Safari\n var xmlhttp=new XMLHttpRequest();\n }else{// code for IE6, IE5\n var xmlhttp=new ActiveXObject(\"Microsoft.XMLHTTP\");\n }\n xmlhttp.onreadystatechange = function(){\n // here, \"this\" refers to xmlhttp\n var state = this.readyState\n res.js.text = this.responseText\n var timer = this.$requestTimer\n if(state===0 && this.onuninitialized){this.onuninitialized(res)}\n else if(state===1 && this.onloading){this.onloading(res)}\n else if(state===2 && this.onloaded){this.onloaded(res)}\n else if(state===3 && this.oninteractive){this.oninteractive(res)}\n else if(state===4 && this.oncomplete){\n if(timer !== null){window.clearTimeout(timer)}\n this.oncomplete(res)\n }\n }\n var res = {\n __class__: ajax.$dict, \n js: xmlhttp,\n headers: {}\n }\n return res\n}\n\nfunction ajax1(){\n if (window.XMLHttpRequest){// code for IE7+, Firefox, Chrome, Opera, Safari\n var xmlhttp=new XMLHttpRequest();\n }else{// code for IE6, IE5\n var xmlhttp=new ActiveXObject(\"Microsoft.XMLHTTP\");\n }\n xmlhttp.onreadystatechange = function(){\n // here, \"this\" refers to xmlhttp\n var state = this.readyState\n var timer = this.$requestTimer\n if(state===0 && this.onuninitialized){this.onuninitialized()}\n else if(state===1 && this.onloading){this.onloading()}\n else if(state===2 && this.onloaded){this.onloaded()}\n else if(state===3 && this.oninteractive){this.oninteractive()}\n else if(state===4 && this.oncomplete){\n if(timer !== null){window.clearTimeout(timer)}\n this.oncomplete()\n }\n }\n return {\n __class__: ajax.$dict, \n js: xmlhttp,\n headers: {}\n }\n}\najax1.__class__ = $B.$factory\n\najax.__class__ = $B.$factory\n\nvar add_to_res = function(res,key,val) {\n if (isinstance(val,list)) {\n for (j = 0; j < val.length; j++) {\n add_to_res(res,key,val[j])\n }\n } else if (val instanceof File || val instanceof Blob) {\n res.append(key,val)\n } else res.append(key,str(val))\n}\n\najax.$dict = {\n\n __class__:$B.$type,\n __name__:'ajax',\n $factory: ajax,\n \n __getattribute__ : function(self, attr){\n // Special case for send : accept dict as parameters\n if(attr=='send'){\n return function(params){\n return ajax.$dict.send(self, params)\n }\n }\n // Otherwise default to JSObject method\n return $B.JSObject.$dict.__getattribute__(self, attr)\n },\n \n __repr__ : function(self){return ''},\n __str__ : function(self){return ''},\n \n bind : function(self, evt, func){\n // req.bind(evt,func) is the same as req.onevt = func\n self.js['on'+evt] = function(){\n try{\n return func.apply(null, arguments)\n }catch(err){\n if(err.__class__!==undefined){\n var msg = _b_.getattr(err, 'info')+\n '\\n'+err.__class__.__name__\n if(err.args){msg += ': '+err.args[0]}\n try{getattr($B.stderr,\"write\")(msg)}\n catch(err){console.log(msg)}\n }else{\n try{getattr($B.stderr,\"write\")(err)}\n catch(err1){console.log(err)}\n }\n }\n }\n return $N\n },\n \n send : function(self,params){\n // params can be Python dictionary or string\n //self.js.onreadystatechange = function(ev){console.log(ev.target)}\n var res = ''\n if(!params){\n self.js.send();\n return $N;\n }else 
if(isinstance(params,str)){\n res = params\n }else if(isinstance(params,dict)){\n if(self.headers['content-type'] == 'multipart/form-data'){\n // The FormData object serializes the data in the 'multipart/form-data'\n // content-type so we may as well override that header if it was set\n // by the user.\n res = new FormData()\n var items = _b_.list(_b_.dict.$dict.items(params))\n for(var i=0, _len_i = items.length; i < _len_i;i++){\n add_to_res(res,str(items[i][0]),items[i][1])\n }\n }else{\n var items = _b_.list(_b_.dict.$dict.items(params))\n for(var i=0, _len_i = items.length; i < _len_i;i++){\n var key = encodeURIComponent(str(items[i][0]));\n if (isinstance(items[i][1],list)) {\n for (j = 0; j < items[i][1].length; j++) {\n res += key +'=' + encodeURIComponent(str(items[i][1][j])) + '&'\n }\n } else {\n res += key + '=' + encodeURIComponent(str(items[i][1])) + '&'\n }\n }\n res = res.substr(0,res.length-1)\n }\n }else{\n throw _b_.TypeError(\"send() argument must be string or dictionary, not '\"+str(params.__class__)+\"'\")\n }\n self.js.send(res)\n return $N\n },\n \n set_header : function(self,key,value){\n self.js.setRequestHeader(key,value)\n self.headers[key.toLowerCase()] = value.toLowerCase()\n },\n \n set_timeout : function(self,seconds,func){\n self.js.$requestTimer = setTimeout(\n function() {self.js.abort();func()},\n seconds*1000);\n }\n}\n\najax.$dict.__mro__ = [$B.JSObject.$dict, _b_.object.$dict]\n\n$B.set_func_names(ajax.$dict)\n\nreturn {ajax:ajax, ajax1:ajax1}\n\n})(__BRYTHON__)\n"], "functools": [".py", "''\n\n\n\n\n\n\n\n\n\n__all__=['update_wrapper','wraps','WRAPPER_ASSIGNMENTS','WRAPPER_UPDATES',\n'total_ordering','cmp_to_key','lru_cache','reduce','partial']\n\nfrom _functools import partial,reduce\nfrom collections import namedtuple\ntry :\n from _thread import RLock\nexcept :\n class RLock:\n ''\n def __enter__(self):pass\n def __exit__(self,exctype,excinst,exctb):pass\n \n \n \n \n \n \n \n \n \nWRAPPER_ASSIGNMENTS=('__module__','__name__','__qualname__','__doc__',\n'__annotations__')\nWRAPPER_UPDATES=('__dict__',)\ndef update_wrapper(wrapper,\nwrapped,\nassigned=WRAPPER_ASSIGNMENTS,\nupdated=WRAPPER_UPDATES):\n ''\n\n\n\n\n\n\n\n\n\n \n wrapper.__wrapped__=wrapped\n for attr in assigned:\n try :\n value=getattr(wrapped,attr)\n except AttributeError:\n pass\n else :\n setattr(wrapper,attr,value)\n for attr in updated:\n getattr(wrapper,attr).update(getattr(wrapped,attr,{}))\n \n return wrapper\n \ndef wraps(wrapped,\nassigned=WRAPPER_ASSIGNMENTS,\nupdated=WRAPPER_UPDATES):\n ''\n\n\n\n\n\n\n \n return partial(update_wrapper,wrapped=wrapped,\n assigned=assigned,updated=updated)\n \n \n \n \n \n \ndef total_ordering(cls):\n ''\n convert={\n '__lt__':[('__gt__',lambda self,other:not (self other or self ==other)),\n ('__ge__',lambda self,other:self >other or self ==other),\n ('__le__',lambda self,other:not self >other)],\n '__ge__':[('__le__',lambda self,other:(not self >=other)or self ==other),\n ('__gt__',lambda self,other:self >=other and not self ==other),\n ('__lt__',lambda self,other:not self >=other)]\n }\n \n roots=[op for op in convert if getattr(cls,op,None )is not getattr(object,op,None )]\n if not roots:\n raise ValueError('must define at least one ordering operation: < > <= >=')\n root=max(roots)\n for opname,opfunc in convert[root]:\n if opname not in roots:\n opfunc.__name__=opname\n opfunc.__doc__=getattr(int,opname).__doc__\n setattr(cls,opname,opfunc)\n return cls\n \n \n \n \n \n \ndef cmp_to_key(mycmp):\n ''\n class K(object):\n 
__slots__=['obj']\n def __init__(self,obj):\n self.obj=obj\n def __lt__(self,other):\n return mycmp(self.obj,other.obj)<0\n def __gt__(self,other):\n return mycmp(self.obj,other.obj)>0\n def __eq__(self,other):\n return mycmp(self.obj,other.obj)==0\n def __le__(self,other):\n return mycmp(self.obj,other.obj)<=0\n def __ge__(self,other):\n return mycmp(self.obj,other.obj)>=0\n def __ne__(self,other):\n return mycmp(self.obj,other.obj)!=0\n __hash__=None\n return K\n \ntry :\n from _functools import cmp_to_key\nexcept ImportError:\n pass\n \n \n \n \n \n \n_CacheInfo=namedtuple(\"CacheInfo\",[\"hits\",\"misses\",\"maxsize\",\"currsize\"])\n\nclass _HashedSeq(list):\n ''\n\n\n\n \n \n __slots__='hashvalue'\n \n def __init__(self,tup,hash=hash):\n self[:]=tup\n self.hashvalue=hash(tup)\n \n def __hash__(self):\n return self.hashvalue\n \ndef _make_key(args,kwds,typed,\nkwd_mark=(object(),),\nfasttypes={int,str,frozenset,type(None )},\nsorted=sorted,tuple=tuple,type=type,len=len):\n ''\n\n\n\n\n\n\n\n\n \n key=args\n if kwds:\n sorted_items=sorted(kwds.items())\n key +=kwd_mark\n for item in sorted_items:\n key +=item\n if typed:\n key +=tuple(type(v)for v in args)\n if kwds:\n key +=tuple(type(v)for k,v in sorted_items)\n elif len(key)==1 and type(key[0])in fasttypes:\n return key[0]\n return _HashedSeq(key)\n \ndef lru_cache(maxsize=128,typed=False ):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n \n \n \n \n \n \n sentinel=object()\n make_key=_make_key\n PREV,NEXT,KEY,RESULT=0,1,2,3\n \n def decorating_function(user_function):\n \n cache={}\n hits=misses=0\n full=False\n cache_get=cache.get\n lock=RLock()\n root=[]\n root[:]=[root,root,None ,None ]\n \n if maxsize ==0:\n \n def wrapper(*args,**kwds):\n \n nonlocal misses\n result=user_function(*args,**kwds)\n misses +=1\n return result\n \n elif maxsize is None :\n \n def wrapper(*args,**kwds):\n \n nonlocal hits,misses\n key=make_key(args,kwds,typed)\n result=cache_get(key,sentinel)\n if result is not sentinel:\n hits +=1\n return result\n result=user_function(*args,**kwds)\n cache[key]=result\n misses +=1\n return result\n \n else :\n \n def wrapper(*args,**kwds):\n \n nonlocal root,hits,misses,full\n key=make_key(args,kwds,typed)\n with lock:\n link=cache_get(key)\n if link is not None :\n \n link_prev,link_next,_key,result=link\n link_prev[NEXT]=link_next\n link_next[PREV]=link_prev\n last=root[PREV]\n last[NEXT]=root[PREV]=link\n link[PREV]=last\n link[NEXT]=root\n hits +=1\n return result\n result=user_function(*args,**kwds)\n with lock:\n if key in cache:\n \n \n \n \n pass\n elif full:\n \n oldroot=root\n oldroot[KEY]=key\n oldroot[RESULT]=result\n \n \n \n \n \n \n root=oldroot[NEXT]\n oldkey=root[KEY]\n oldresult=root[RESULT]\n root[KEY]=root[RESULT]=None\n \n del cache[oldkey]\n \n \n \n cache[key]=oldroot\n else :\n \n last=root[PREV]\n link=[last,root,key,result]\n last[NEXT]=root[PREV]=cache[key]=link\n full=(len(cache)>=maxsize)\n misses +=1\n return result\n \n def cache_info():\n ''\n with lock:\n return _CacheInfo(hits,misses,maxsize,len(cache))\n \n def cache_clear():\n ''\n nonlocal hits,misses,full\n with lock:\n cache.clear()\n root[:]=[root,root,None ,None ]\n hits=misses=0\n full=False\n \n wrapper.cache_info=cache_info\n wrapper.cache_clear=cache_clear\n return update_wrapper(wrapper,user_function)\n \n return decorating_function\n"], "_markupbase": [".py", "''\n\n\n\n\n\n\nimport 
re\n\n_declname_match=re.compile(r'[a-zA-Z][-_.a-zA-Z0-9]*\\s*').match\n_declstringlit_match=re.compile(r'(\\'[^\\']*\\'|\"[^\"]*\")\\s*').match\n_commentclose=re.compile(r'--\\s*>')\n_markedsectionclose=re.compile(r']\\s*]\\s*>')\n\n\n\n\n_msmarkedsectionclose=re.compile(r']\\s*>')\n\ndel re\n\n\nclass ParserBase:\n ''\n \n \n def __init__(self):\n if self.__class__ is ParserBase:\n raise RuntimeError(\n \"_markupbase.ParserBase must be subclassed\")\n \n def error(self,message):\n raise NotImplementedError(\n \"subclasses of ParserBase must override error()\")\n \n def reset(self):\n self.lineno=1\n self.offset=0\n \n def getpos(self):\n ''\n return self.lineno,self.offset\n \n \n \n \n \n def updatepos(self,i,j):\n if i >=j:\n return j\n rawdata=self.rawdata\n nlines=rawdata.count(\"\\n\",i,j)\n if nlines:\n self.lineno=self.lineno+nlines\n pos=rawdata.rindex(\"\\n\",i,j)\n self.offset=j -(pos+1)\n else :\n self.offset=self.offset+j -i\n return j\n \n _decl_otherchars=''\n \n \n def parse_declaration(self,i):\n \n \n \n \n \n \n \n \n \n \n rawdata=self.rawdata\n j=i+2\n assert rawdata[i:j]==\"\":\n \n return j+1\n if rawdata[j:j+1]in (\"-\",\"\"):\n \n \n return -1\n \n n=len(rawdata)\n if rawdata[j:j+2]=='--':\n \n return self.parse_comment(i)\n elif rawdata[j]=='[':\n \n \n \n \n return self.parse_marked_section(i)\n else :\n decltype,j=self._scan_name(j,i)\n if j <0:\n return j\n if decltype ==\"doctype\":\n self._decl_otherchars=''\n while j \":\n \n data=rawdata[i+2:j]\n if decltype ==\"doctype\":\n self.handle_decl(data)\n else :\n \n \n \n \n self.unknown_decl(data)\n return j+1\n if c in\"\\\"'\":\n m=_declstringlit_match(rawdata,j)\n if not m:\n return -1\n j=m.end()\n elif c in\"abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ\":\n name,j=self._scan_name(j,i)\n elif c in self._decl_otherchars:\n j=j+1\n elif c ==\"[\":\n \n if decltype ==\"doctype\":\n j=self._parse_doctype_subset(j+1,i)\n elif decltype in {\"attlist\",\"linktype\",\"link\",\"element\"}:\n \n \n \n \n self.error(\"unsupported '[' char in %s declaration\"%decltype)\n else :\n self.error(\"unexpected '[' char in declaration\")\n else :\n self.error(\n \"unexpected %r char in declaration\"%rawdata[j])\n if j <0:\n return j\n return -1\n \n \n \n def parse_marked_section(self,i,report=1):\n rawdata=self.rawdata\n assert rawdata[i:i+3]=='n:\n \n return -1\n if rawdata[j:j+4]==\"\n \n \"\"\"%(self.OutputString(attrs).replace('\"',r'\\\"'))\n \n def OutputString(self,attrs=None ):\n \n \n result=[]\n append=result.append\n \n \n append(\"%s=%s\"%(self.key,self.coded_value))\n \n \n if attrs is None :\n attrs=self._reserved\n items=sorted(self.items())\n for key,value in items:\n if value ==\"\":\n continue\n if key not in attrs:\n continue\n if key ==\"expires\"and isinstance(value,int):\n append(\"%s=%s\"%(self._reserved[key],_getdate(value)))\n elif key ==\"max-age\"and isinstance(value,int):\n append(\"%s=%d\"%(self._reserved[key],value))\n elif key ==\"secure\":\n append(str(self._reserved[key]))\n elif key ==\"httponly\":\n append(str(self._reserved[key]))\n else :\n append(\"%s=%s\"%(self._reserved[key],value))\n \n \n return _semispacejoin(result)\n \n \n \n \n \n \n \n \n \n \n \n_LegalCharsPatt=r\"[\\w\\d!#%&'~_`><@,:/\\$\\*\\+\\-\\.\\^\\|\\)\\(\\?\\}\\{\\=]\"\n_CookiePattern=re.compile(r\"\"\"\n (?x) # This is a verbose pattern\n (?P # Start of group 'key'\n \"\"\"+_LegalCharsPatt+r\"\"\"+? 
# Any word of at least one letter\n ) # End of group 'key'\n ( # Optional group: there may not be a value.\n \\s*=\\s* # Equal Sign\n (?P # Start of group 'val'\n \"(?:[^\\\\\"]|\\\\.)*\" # Any doublequoted string\n | # or\n \\w{3},\\s[\\w\\d\\s-]{9,11}\\s[\\d:]{8}\\sGMT # Special case for \"expires\" attr\n | # or\n \"\"\"+_LegalCharsPatt+r\"\"\"* # Any word or empty string\n ) # End of group 'val'\n )? # End of optional value group\n \\s* # Any number of spaces.\n (\\s+|;|$) # Ending either at space, semicolon, or EOS.\n \"\"\",re.ASCII)\n\n\n\n\n\nclass BaseCookie(dict):\n ''\n \n def value_decode(self,val):\n ''\n\n\n\n\n \n return val,val\n \n def value_encode(self,val):\n ''\n\n\n\n \n strval=str(val)\n return strval,strval\n \n def __init__(self,input=None ):\n if input:\n self.load(input)\n \n def __set(self,key,real_value,coded_value):\n ''\n M=self.get(key,Morsel())\n M.set(key,real_value,coded_value)\n dict.__setitem__(self,key,M)\n \n def __setitem__(self,key,value):\n ''\n rval,cval=self.value_encode(value)\n self.__set(key,rval,cval)\n \n def output(self,attrs=None ,header=\"Set-Cookie:\",sep=\"\\015\\012\"):\n ''\n result=[]\n items=sorted(self.items())\n for key,value in items:\n result.append(value.output(attrs,header))\n return sep.join(result)\n \n __str__=output\n \n def __repr__(self):\n l=[]\n items=sorted(self.items())\n for key,value in items:\n l.append('%s=%s'%(key,repr(value.value)))\n return'<%s: %s>'%(self.__class__.__name__,_spacejoin(l))\n \n def js_output(self,attrs=None ):\n ''\n result=[]\n items=sorted(self.items())\n for key,value in items:\n result.append(value.js_output(attrs))\n return _nulljoin(result)\n \n def load(self,rawdata):\n ''\n\n\n\n \n if isinstance(rawdata,str):\n self.__parse_string(rawdata)\n else :\n \n for key,value in rawdata.items():\n self[key]=value\n return\n \n def __parse_string(self,str,patt=_CookiePattern):\n i=0\n n=len(str)\n M=None\n \n while 0 <=i =(3,4)\n\nError=concurrent.futures._base.Error\nCancelledError=concurrent.futures.CancelledError\nTimeoutError=concurrent.futures.TimeoutError\n\nSTACK_DEBUG=logging.DEBUG -1\n\n\nclass InvalidStateError(Error):\n ''\n \n \nclass _TracebackLogger:\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n __slots__=('loop','source_traceback','exc','tb')\n \n def __init__(self,future,exc):\n self.loop=future._loop\n self.source_traceback=future._source_traceback\n self.exc=exc\n self.tb=None\n \n def activate(self):\n exc=self.exc\n if exc is not None :\n self.exc=None\n \n \n \n try :\n self.tb=traceback.format_exception(exc.__class__,exc)\n except :\n self.tb=None\n \n def clear(self):\n self.exc=None\n self.tb=None\n \n def __del__(self):\n if self.tb:\n msg='Future/Task exception was never retrieved\\n'\n if self.source_traceback:\n src=''.join(traceback.format_list(self.source_traceback))\n msg +='Future/Task created at (most recent call last):\\n'\n msg +='%s\\n'%src.rstrip()\n msg +=''.join(self.tb).rstrip()\n self.loop.call_exception_handler({'message':msg})\n \n \nclass Future:\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n \n _state=_PENDING\n _result=None\n _exception=None\n _loop=None\n _source_traceback=None\n \n _blocking=False\n \n _log_traceback=False\n _tb_logger=None\n \n def __init__(self,*,loop=None ):\n ''\n\n\n\n\n \n if loop is None :\n self._loop=events.get_event_loop()\n else :\n self._loop=loop\n self._callbacks=[]\n if self._loop.get_debug():\n self._source_traceback=traceback.extract_stack(sys._getframe(1))\n \n 
def _format_callbacks(self):\n cb=self._callbacks\n size=len(cb)\n if not size:\n cb=''\n \n def format_cb(callback):\n return events._format_callback(callback,())\n \n if size ==1:\n cb=format_cb(cb[0])\n elif size ==2:\n cb='{}, {}'.format(format_cb(cb[0]),format_cb(cb[1]))\n elif size >2:\n cb='{}, <{} more>, {}'.format(format_cb(cb[0]),\n size -2,\n format_cb(cb[-1]))\n return'cb=[%s]'%cb\n \n def _repr_info(self):\n info=[self._state.lower()]\n if self._state ==_FINISHED:\n if self._exception is not None :\n info.append('exception={!r}'.format(self._exception))\n else :\n \n \n result=reprlib.repr(self._result)\n info.append('result={}'.format(result))\n if self._callbacks:\n info.append(self._format_callbacks())\n if self._source_traceback:\n frame=self._source_traceback[-1]\n info.append('created at %s:%s'%(frame[0],frame[1]))\n return info\n \n def __repr__(self):\n info=self._repr_info()\n return'<%s %s>'%(self.__class__.__name__,' '.join(info))\n \n \n \n \n if _PY34:\n def __del__(self):\n if not self._log_traceback:\n \n \n return\n exc=self._exception\n context={\n 'message':('%s exception was never retrieved'\n %self.__class__.__name__),\n 'exception':exc,\n 'future':self,\n }\n if self._source_traceback:\n context['source_traceback']=self._source_traceback\n self._loop.call_exception_handler(context)\n \n def cancel(self):\n ''\n\n\n\n\n \n if self._state !=_PENDING:\n return False\n self._state=_CANCELLED\n self._schedule_callbacks()\n return True\n \n def _schedule_callbacks(self):\n ''\n\n\n\n \n callbacks=self._callbacks[:]\n if not callbacks:\n return\n \n self._callbacks[:]=[]\n for callback in callbacks:\n self._loop.call_soon(callback,self)\n \n def cancelled(self):\n ''\n return self._state ==_CANCELLED\n \n \n \n def done(self):\n ''\n\n\n\n \n return self._state !=_PENDING\n \n def result(self):\n ''\n\n\n\n\n \n if self._state ==_CANCELLED:\n raise CancelledError\n if self._state !=_FINISHED:\n raise InvalidStateError('Result is not ready.')\n self._log_traceback=False\n if self._tb_logger is not None :\n self._tb_logger.clear()\n self._tb_logger=None\n if self._exception is not None :\n raise self._exception\n return self._result\n \n def exception(self):\n ''\n\n\n\n\n\n \n if self._state ==_CANCELLED:\n raise CancelledError\n if self._state !=_FINISHED:\n raise InvalidStateError('Exception is not set.')\n self._log_traceback=False\n if self._tb_logger is not None :\n self._tb_logger.clear()\n self._tb_logger=None\n return self._exception\n \n def add_done_callback(self,fn):\n ''\n\n\n\n\n \n if self._state !=_PENDING:\n self._loop.call_soon(fn,self)\n else :\n self._callbacks.append(fn)\n \n \n \n def remove_done_callback(self,fn):\n ''\n\n\n \n filtered_callbacks=[f for f in self._callbacks if f !=fn]\n removed_count=len(self._callbacks)-len(filtered_callbacks)\n if removed_count:\n self._callbacks[:]=filtered_callbacks\n return removed_count\n \n \n \n def _set_result_unless_cancelled(self,result):\n ''\n if self.cancelled():\n return\n self.set_result(result)\n \n def set_result(self,result):\n ''\n\n\n\n \n if self._state !=_PENDING:\n raise InvalidStateError('{}: {!r}'.format(self._state,self))\n self._result=result\n self._state=_FINISHED\n self._schedule_callbacks()\n \n def set_exception(self,exception):\n ''\n\n\n\n \n if self._state !=_PENDING:\n raise InvalidStateError('{}: {!r}'.format(self._state,self))\n if isinstance(exception,type):\n exception=exception()\n self._exception=exception\n self._state=_FINISHED\n self._schedule_callbacks()\n if 
_PY34:\n self._log_traceback=True\n else :\n self._tb_logger=_TracebackLogger(self,exception)\n \n \n self._loop.call_soon(self._tb_logger.activate)\n \n \n \n def _copy_state(self,other):\n ''\n\n\n \n assert other.done()\n if self.cancelled():\n return\n assert not self.done()\n if other.cancelled():\n self.cancel()\n else :\n exception=other.exception()\n if exception is not None :\n self.set_exception(exception)\n else :\n result=other.result()\n self.set_result(result)\n \n def __iter__(self):\n if not self.done():\n self._blocking=True\n yield self\n assert self.done(),\"yield from wasn't used with future\"\n return self.result()\n \n \ndef wrap_future(fut,*,loop=None ):\n ''\n if isinstance(fut,Future):\n return fut\n assert isinstance(fut,concurrent.futures.Future), 'concurrent.futures.Future is expected, got {!r}'.format(fut)\n if loop is None :\n loop=events.get_event_loop()\n new_future=Future(loop=loop)\n \n def _check_cancel_other(f):\n if f.cancelled():\n fut.cancel()\n \n new_future.add_done_callback(_check_cancel_other)\n fut.add_done_callback(\n lambda future:loop.call_soon_threadsafe(\n new_future._copy_state,future))\n return new_future\n"], "antigravity": [".py", "\nimport webbrowser\nimport hashlib\n\nwebbrowser.open(\"http://xkcd.com/353/\")\n\ndef geohash(latitude,longitude,datedow):\n ''\n\n\n\n\n \n \n h=hashlib.md5(datedow).hexdigest()\n p,q=[('%f'%float.fromhex('0.'+x))for x in (h[:16],h[16:32])]\n print('%d%s %d%s'%(latitude,p[1:],longitude,q[1:]))\n"], "posixpath": [".py", "''\n\n\n\n\n\n\n\n\n\n\n\nimport os\nimport sys\nimport stat\nimport genericpath\nfrom genericpath import *\n\n__all__=[\"normcase\",\"isabs\",\"join\",\"splitdrive\",\"split\",\"splitext\",\n\"basename\",\"dirname\",\"commonprefix\",\"getsize\",\"getmtime\",\n\"getatime\",\"getctime\",\"islink\",\"exists\",\"lexists\",\"isdir\",\"isfile\",\n\"ismount\",\"expanduser\",\"expandvars\",\"normpath\",\"abspath\",\n\"samefile\",\"sameopenfile\",\"samestat\",\n\"curdir\",\"pardir\",\"sep\",\"pathsep\",\"defpath\",\"altsep\",\"extsep\",\n\"devnull\",\"realpath\",\"supports_unicode_filenames\",\"relpath\"]\n\n\n\ncurdir='.'\npardir='..'\nextsep='.'\nsep='/'\npathsep=':'\ndefpath=':/bin:/usr/bin'\naltsep=None\ndevnull='/dev/null'\n\ndef _get_sep(path):\n if isinstance(path,bytes):\n return b'/'\n else :\n return'/'\n \n \n \n \n \n \ndef normcase(s):\n ''\n \n if not isinstance(s,(bytes,str)):\n raise TypeError(\"normcase() argument must be str or bytes, \"\n \"not '{}'\".format(s.__class__.__name__))\n return s\n \n \n \n \n \ndef isabs(s):\n ''\n sep=_get_sep(s)\n return s.startswith(sep)\n \n \n \n \n \n \ndef join(a,*p):\n ''\n\n\n \n sep=_get_sep(a)\n path=a\n try :\n for b in p:\n if b.startswith(sep):\n path=b\n elif not path or path.endswith(sep):\n path +=b\n else :\n path +=sep+b\n except TypeError:\n valid_types=all(isinstance(s,(str,bytes,bytearray))\n for s in (a,)+p)\n if valid_types:\n \n raise TypeError(\"Can't mix strings and bytes in path \"\n \"components.\")from None\n raise\n return path\n \n \n \n \n \n \n \ndef split(p):\n ''\n \n sep=_get_sep(p)\n i=p.rfind(sep)+1\n head,tail=p[:i],p[i:]\n if head and head !=sep *len(head):\n head=head.rstrip(sep)\n return head,tail\n \n \n \n \n \n \n \ndef splitext(p):\n if isinstance(p,bytes):\n sep=b'/'\n extsep=b'.'\n else :\n sep='/'\n extsep='.'\n return genericpath._splitext(p,sep,None ,extsep)\nsplitext.__doc__=genericpath._splitext.__doc__\n\n\n\n\ndef splitdrive(p):\n ''\n \n return p[:0],p\n \n \n \n \ndef basename(p):\n ''\n 
sep=_get_sep(p)\n i=p.rfind(sep)+1\n return p[i:]\n \n \n \n \ndef dirname(p):\n ''\n sep=_get_sep(p)\n i=p.rfind(sep)+1\n head=p[:i]\n if head and head !=sep *len(head):\n head=head.rstrip(sep)\n return head\n \n \n \n \n \ndef islink(path):\n ''\n try :\n st=os.lstat(path)\n except (os.error,AttributeError):\n return False\n return stat.S_ISLNK(st.st_mode)\n \n \n \ndef lexists(path):\n ''\n try :\n os.lstat(path)\n except os.error:\n return False\n return True\n \n \n \n \ndef samefile(f1,f2):\n ''\n s1=os.stat(f1)\n s2=os.stat(f2)\n return samestat(s1,s2)\n \n \n \n \n \ndef sameopenfile(fp1,fp2):\n ''\n s1=os.fstat(fp1)\n s2=os.fstat(fp2)\n return samestat(s1,s2)\n \n \n \n \n \ndef samestat(s1,s2):\n ''\n return s1.st_ino ==s2.st_ino and s1.st_dev ==s2.st_dev\n \n \n \n \n \ndef ismount(path):\n ''\n if islink(path):\n \n return False\n try :\n s1=os.lstat(path)\n if isinstance(path,bytes):\n parent=join(path,b'..')\n else :\n parent=join(path,'..')\n s2=os.lstat(parent)\n except os.error:\n return False\n dev1=s1.st_dev\n dev2=s2.st_dev\n if dev1 !=dev2:\n return True\n ino1=s1.st_ino\n ino2=s2.st_ino\n if ino1 ==ino2:\n return True\n return False\n \n \n \n \n \n \n \n \n \n \n \ndef expanduser(path):\n ''\n return path\n \n \n \n \n \n_varprog=None\n_varprogb=None\n\ndef expandvars(path):\n ''\n \n global _varprog,_varprogb\n if isinstance(path,bytes):\n if b'$'not in path:\n return path\n if not _varprogb:\n import re\n _varprogb=re.compile(br'\\$(\\w+|\\{[^}]*\\})',re.ASCII)\n search=_varprogb.search\n start=b'{'\n end=b'}'\n else :\n if'$'not in path:\n return path\n if not _varprog:\n import re\n _varprog=re.compile(r'\\$(\\w+|\\{[^}]*\\})',re.ASCII)\n search=_varprog.search\n start='{'\n end='}'\n i=0\n while True :\n m=search(path,i)\n if not m:\n break\n i,j=m.span(0)\n name=m.group(1)\n if name.startswith(start)and name.endswith(end):\n name=name[1:-1]\n if isinstance(name,bytes):\n name=str(name,'ASCII')\n if name in os.environ:\n tail=path[j:]\n value=os.environ[name]\n if isinstance(path,bytes):\n value=value.encode('ASCII')\n path=path[:i]+value\n i=len(path)\n path +=tail\n else :\n i=j\n return path\n \n \n \n \n \n \ndef normpath(path):\n ''\n if isinstance(path,bytes):\n sep=b'/'\n empty=b''\n dot=b'.'\n dotdot=b'..'\n else :\n sep='/'\n empty=''\n dot='.'\n dotdot='..'\n if path ==empty:\n return dot\n initial_slashes=path.startswith(sep)\n \n \n if (initial_slashes and\n path.startswith(sep *2)and not path.startswith(sep *3)):\n initial_slashes=2\n comps=path.split(sep)\n new_comps=[]\n for comp in comps:\n if comp in (empty,dot):\n continue\n if (comp !=dotdot or (not initial_slashes and not new_comps)or\n (new_comps and new_comps[-1]==dotdot)):\n new_comps.append(comp)\n elif new_comps:\n new_comps.pop()\n comps=new_comps\n path=sep.join(comps)\n if initial_slashes:\n path=sep *initial_slashes+path\n return path or dot\n \n \ndef abspath(path):\n ''\n if not isabs(path):\n if isinstance(path,bytes):\n cwd=os.getcwdb()\n else :\n cwd=os.getcwd()\n path=join(cwd,path)\n return normpath(path)\n \n \n \n \n \ndef realpath(filename):\n ''\n \n path,ok=_joinrealpath(filename[:0],filename,{})\n return abspath(path)\n \n \n \ndef _joinrealpath(path,rest,seen):\n if isinstance(path,bytes):\n sep=b'/'\n curdir=b'.'\n pardir=b'..'\n else :\n sep='/'\n curdir='.'\n pardir='..'\n \n if isabs(rest):\n rest=rest[1:]\n path=sep\n \n while rest:\n name,_,rest=rest.partition(sep)\n if not name or name ==curdir:\n \n continue\n if name ==pardir:\n \n if path:\n 
path,name=split(path)\n if name ==pardir:\n path=join(path,pardir,pardir)\n else :\n path=pardir\n continue\n newpath=join(path,name)\n if not islink(newpath):\n path=newpath\n continue\n \n if newpath in seen:\n \n path=seen[newpath]\n if path is not None :\n \n continue\n \n \n return join(newpath,rest),False\n seen[newpath]=None\n path,ok=_joinrealpath(path,os.readlink(newpath),seen)\n if not ok:\n return join(path,rest),False\n seen[newpath]=path\n \n return path,True\n \n \nsupports_unicode_filenames=(sys.platform =='darwin')\n\ndef relpath(path,start=None ):\n ''\n \n if not path:\n raise ValueError(\"no path specified\")\n \n if isinstance(path,bytes):\n curdir=b'.'\n sep=b'/'\n pardir=b'..'\n else :\n curdir='.'\n sep='/'\n pardir='..'\n \n if start is None :\n start=curdir\n \n start_list=[x for x in abspath(start).split(sep)if x]\n path_list=[x for x in abspath(path).split(sep)if x]\n \n \n i=len(commonprefix([start_list,path_list]))\n \n rel_list=[pardir]*(len(start_list)-i)+path_list[i:]\n if not rel_list:\n return curdir\n return join(*rel_list)\n"], "sys": [".py", "\nfrom _sys import *\n\n_getframe=Getframe\nfrom javascript import JSObject\nfrom browser import window\n\nbrython_debug_mode=__BRYTHON__.debug\n\nbase_exec_prefix=__BRYTHON__.brython_path\n\nbase_prefix=__BRYTHON__.brython_path\n\nbuiltin_module_names=__BRYTHON__.builtin_module_names\n\nbyteorder='little'\n\ndef exc_info():\n exc=__BRYTHON__.current_exception\n if exc is None :\n return (None ,None ,None )\n return (exc.__class__,exc,exc.traceback)\n \nexec_prefix=__BRYTHON__.brython_path\n\nexecutable=__BRYTHON__.brython_path+'/brython.js'\n\ndef exit(i=None ):\n raise SystemExit('')\n \nclass flag_class:\n def __init__(self):\n self.debug=0\n self.inspect=0\n self.interactive=0\n self.optimize=0\n self.dont_write_bytecode=0\n self.no_user_site=0\n self.no_site=0\n self.ignore_environment=0\n self.verbose=0\n self.bytes_warning=0\n self.quiet=0\n self.hash_randomization=1\n \nflags=flag_class()\n\ndef getfilesystemencoding(*args,**kw):\n ''\n\n \n return'utf-8'\n \ndef getrecursionlimit():\n return 200\n \nmaxsize=2 **63 -1\n\nmaxunicode=1114111\n\n\n\n\n\n\nplatform=\"brython\"\n\nprefix=__BRYTHON__.brython_path\n\nversion='.'.join(str(x)for x in __BRYTHON__.version_info[:3])\nversion +=\" (default, %s) \\n[Javascript 1.5] on Brython\"%__BRYTHON__.compiled_date\nhexversion=0x03000000\n\nclass __version_info(object):\n def __init__(self,version_info):\n self.version_info=version_info\n self.major=version_info[0]\n self.minor=version_info[1]\n self.micro=version_info[2]\n self.releaselevel=version_info[3]\n self.serial=version_info[4]\n \n def __getitem__(self,index):\n if isinstance(self.version_info[index],list):\n return tuple(self.version_info[index])\n return self.version_info[index]\n \n def hexversion(self):\n try :\n return'0%d0%d0%d'%(self.major,self.minor,self.micro)\n finally :\n return'0%d0000'%(self.major)\n \n def __str__(self):\n _s=\"sys.version(major=%d, minor=%d, micro=%d, releaselevel='%s', serial=%d)\"\n return _s %(self.major,self.minor,self.micro,\n self.releaselevel,self.serial)\n \n \n def __eq__(self,other):\n if isinstance(other,tuple):\n return (self.major,self.minor,self.micro)==other\n \n raise Error(\"Error! I don't know how to compare!\")\n \n def __ge__(self,other):\n if isinstance(other,tuple):\n return (self.major,self.minor,self.micro)>=other\n \n raise Error(\"Error! 
I don't know how to compare!\")\n \n def __gt__(self,other):\n if isinstance(other,tuple):\n return (self.major,self.minor,self.micro)>other\n \n raise Error(\"Error! I don't know how to compare!\")\n \n def __le__(self,other):\n if isinstance(other,tuple):\n return (self.major,self.minor,self.micro)<=other\n \n raise Error(\"Error! I don't know how to compare!\")\n \n def __lt__(self,other):\n if isinstance(other,tuple):\n return (self.major,self.minor,self.micro)'\n'?'\n'\\xf8'\n'\\xc9'\n'\\xca'\n'\\xcb'\n'\\xc8'\n'\\xcd'\n'\\xce'\n'\\xcf'\n'\\xcc'\n'`'\n':'\n'#'\n'@'\n\"'\"\n'='\n'\"'\n'\\xd8'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'\\xab'\n'\\xbb'\n'\\xf0'\n'\\xfd'\n'\\xfe'\n'\\xb1'\n'\\xb0'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n'\\xaa'\n'\\xba'\n'\\xe6'\n'\\xb8'\n'\\xc6'\n'\\u20ac'\n'\\xb5'\n'~'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'\\xa1'\n'\\xbf'\n'\\xd0'\n'\\xdd'\n'\\xde'\n'\\xae'\n'^'\n'\\xa3'\n'\\xa5'\n'\\xb7'\n'\\xa9'\n'\\xa7'\n'\\xb6'\n'\\xbc'\n'\\xbd'\n'\\xbe'\n'['\n']'\n'\\xaf'\n'\\xa8'\n'\\xb4'\n'\\xd7'\n'{'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'\\xad'\n'\\xf4'\n'\\xf6'\n'\\xf2'\n'\\xf3'\n'\\xf5'\n'}'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'\\xb9'\n'\\xfb'\n'\\xfc'\n'\\xf9'\n'\\xfa'\n'\\xff'\n'\\\\'\n'\\xf7'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'\\xb2'\n'\\xd4'\n'\\xd6'\n'\\xd2'\n'\\xd3'\n'\\xd5'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n'\\xb3'\n'\\xdb'\n'\\xdc'\n'\\xd9'\n'\\xda'\n'\\x9f'\n)\n\n\nencoding_table=codecs.charmap_build(decoding_table)\n"], "numbers": [".py", "\n\n\n\"\"\"Abstract Base Classes (ABCs) for numbers, according to PEP 3141.\n\nTODO: Fill out more detailed documentation on the operators.\"\"\"\n\nfrom abc import ABCMeta,abstractmethod\n\n__all__=[\"Number\",\"Complex\",\"Real\",\"Rational\",\"Integral\"]\n\nclass Number(metaclass=ABCMeta):\n ''\n\n\n\n \n __slots__=()\n \n \n __hash__=None\n \n \n \n \n \n \n \n \n \n \nclass Complex(Number):\n ''\n\n\n\n\n\n\n\n \n \n __slots__=()\n \n @abstractmethod\n def __complex__(self):\n ''\n \n def __bool__(self):\n ''\n return self !=0\n \n @property\n @abstractmethod\n def real(self):\n ''\n\n\n \n raise NotImplementedError\n \n @property\n @abstractmethod\n def imag(self):\n ''\n\n\n \n raise NotImplementedError\n \n @abstractmethod\n def __add__(self,other):\n ''\n raise NotImplementedError\n \n @abstractmethod\n def __radd__(self,other):\n ''\n raise NotImplementedError\n \n @abstractmethod\n def __neg__(self):\n ''\n raise NotImplementedError\n \n @abstractmethod\n def __pos__(self):\n ''\n raise NotImplementedError\n \n def __sub__(self,other):\n ''\n return self+-other\n \n def __rsub__(self,other):\n ''\n return -self+other\n \n @abstractmethod\n def __mul__(self,other):\n ''\n raise NotImplementedError\n \n @abstractmethod\n def __rmul__(self,other):\n ''\n raise NotImplementedError\n \n @abstractmethod\n def __truediv__(self,other):\n ''\n raise NotImplementedError\n \n @abstractmethod\n def __rtruediv__(self,other):\n ''\n raise NotImplementedError\n \n @abstractmethod\n def __pow__(self,exponent):\n ''\n raise NotImplementedError\n \n @abstractmethod\n def __rpow__(self,base):\n ''\n raise NotImplementedError\n \n @abstractmethod\n def __abs__(self):\n ''\n raise NotImplementedError\n \n @abstractmethod\n def conjugate(self):\n ''\n raise NotImplementedError\n \n @abstractmethod\n def __eq__(self,other):\n ''\n raise NotImplementedError\n \n def __ne__(self,other):\n ''\n \n return not (self ==other)\n \nComplex.register(complex)\n\n\nclass 
Real(Complex):\n ''\n\n\n\n\n\n \n \n __slots__=()\n \n @abstractmethod\n def __float__(self):\n ''\n\n \n raise NotImplementedError\n \n @abstractmethod\n def __trunc__(self):\n ''\n\n\n\n\n\n\n\n \n raise NotImplementedError\n \n @abstractmethod\n def __floor__(self):\n ''\n raise NotImplementedError\n \n @abstractmethod\n def __ceil__(self):\n ''\n raise NotImplementedError\n \n @abstractmethod\n def __round__(self,ndigits=None ):\n ''\n\n\n\n \n raise NotImplementedError\n \n def __divmod__(self,other):\n ''\n\n\n\n \n return (self //other,self %other)\n \n def __rdivmod__(self,other):\n ''\n\n\n\n \n return (other //self,other %self)\n \n @abstractmethod\n def __floordiv__(self,other):\n ''\n raise NotImplementedError\n \n @abstractmethod\n def __rfloordiv__(self,other):\n ''\n raise NotImplementedError\n \n @abstractmethod\n def __mod__(self,other):\n ''\n raise NotImplementedError\n \n @abstractmethod\n def __rmod__(self,other):\n ''\n raise NotImplementedError\n \n @abstractmethod\n def __lt__(self,other):\n ''\n\n \n raise NotImplementedError\n \n @abstractmethod\n def __le__(self,other):\n ''\n raise NotImplementedError\n \n \n def __complex__(self):\n ''\n return complex(float(self))\n \n @property\n def real(self):\n ''\n return +self\n \n @property\n def imag(self):\n ''\n return 0\n \n def conjugate(self):\n ''\n return +self\n \nReal.register(float)\n\n\nclass Rational(Real):\n ''\n \n __slots__=()\n \n @property\n @abstractmethod\n def numerator(self):\n raise NotImplementedError\n \n @property\n @abstractmethod\n def denominator(self):\n raise NotImplementedError\n \n \n def __float__(self):\n ''\n\n\n\n\n\n \n return self.numerator /self.denominator\n \n \nclass Integral(Rational):\n ''\n \n __slots__=()\n \n @abstractmethod\n def __int__(self):\n ''\n raise NotImplementedError\n \n def __index__(self):\n ''\n return int(self)\n \n @abstractmethod\n def __pow__(self,exponent,modulus=None ):\n ''\n\n\n\n\n\n \n raise NotImplementedError\n \n @abstractmethod\n def __lshift__(self,other):\n ''\n raise NotImplementedError\n \n @abstractmethod\n def __rlshift__(self,other):\n ''\n raise NotImplementedError\n \n @abstractmethod\n def __rshift__(self,other):\n ''\n raise NotImplementedError\n \n @abstractmethod\n def __rrshift__(self,other):\n ''\n raise NotImplementedError\n \n @abstractmethod\n def __and__(self,other):\n ''\n raise NotImplementedError\n \n @abstractmethod\n def __rand__(self,other):\n ''\n raise NotImplementedError\n \n @abstractmethod\n def __xor__(self,other):\n ''\n raise NotImplementedError\n \n @abstractmethod\n def __rxor__(self,other):\n ''\n raise NotImplementedError\n \n @abstractmethod\n def __or__(self,other):\n ''\n raise NotImplementedError\n \n @abstractmethod\n def __ror__(self,other):\n ''\n raise NotImplementedError\n \n @abstractmethod\n def __invert__(self):\n ''\n raise NotImplementedError\n \n \n def __float__(self):\n ''\n return float(int(self))\n \n @property\n def numerator(self):\n ''\n return +self\n \n @property\n def denominator(self):\n ''\n return 1\n \nIntegral.register(int)\n"], "encodings.aliases": [".py", 
"''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\naliases={\n\n\n\n\n'646':'ascii',\n'ansi_x3.4_1968':'ascii',\n'ansi_x3_4_1968':'ascii',\n'ansi_x3.4_1986':'ascii',\n'cp367':'ascii',\n'csascii':'ascii',\n'ibm367':'ascii',\n'iso646_us':'ascii',\n'iso_646.irv_1991':'ascii',\n'iso_ir_6':'ascii',\n'us':'ascii',\n'us_ascii':'ascii',\n\n\n'base64':'base64_codec',\n'base_64':'base64_codec',\n\n\n'big5_tw':'big5',\n'csbig5':'big5',\n\n\n'big5_hkscs':'big5hkscs',\n'hkscs':'big5hkscs',\n\n\n'bz2':'bz2_codec',\n\n\n'037':'cp037',\n'csibm037':'cp037',\n'ebcdic_cp_ca':'cp037',\n'ebcdic_cp_nl':'cp037',\n'ebcdic_cp_us':'cp037',\n'ebcdic_cp_wt':'cp037',\n'ibm037':'cp037',\n'ibm039':'cp037',\n\n\n'1026':'cp1026',\n'csibm1026':'cp1026',\n'ibm1026':'cp1026',\n\n\n'1125':'cp1125',\n'ibm1125':'cp1125',\n'cp866u':'cp1125',\n'ruscii':'cp1125',\n\n\n'1140':'cp1140',\n'ibm1140':'cp1140',\n\n\n'1250':'cp1250',\n'windows_1250':'cp1250',\n\n\n'1251':'cp1251',\n'windows_1251':'cp1251',\n\n\n'1252':'cp1252',\n'windows_1252':'cp1252',\n\n\n'1253':'cp1253',\n'windows_1253':'cp1253',\n\n\n'1254':'cp1254',\n'windows_1254':'cp1254',\n\n\n'1255':'cp1255',\n'windows_1255':'cp1255',\n\n\n'1256':'cp1256',\n'windows_1256':'cp1256',\n\n\n'1257':'cp1257',\n'windows_1257':'cp1257',\n\n\n'1258':'cp1258',\n'windows_1258':'cp1258',\n\n\n'273':'cp273',\n'ibm273':'cp273',\n'csibm273':'cp273',\n\n\n'424':'cp424',\n'csibm424':'cp424',\n'ebcdic_cp_he':'cp424',\n'ibm424':'cp424',\n\n\n'437':'cp437',\n'cspc8codepage437':'cp437',\n'ibm437':'cp437',\n\n\n'500':'cp500',\n'csibm500':'cp500',\n'ebcdic_cp_be':'cp500',\n'ebcdic_cp_ch':'cp500',\n'ibm500':'cp500',\n\n\n'775':'cp775',\n'cspc775baltic':'cp775',\n'ibm775':'cp775',\n\n\n'850':'cp850',\n'cspc850multilingual':'cp850',\n'ibm850':'cp850',\n\n\n'852':'cp852',\n'cspcp852':'cp852',\n'ibm852':'cp852',\n\n\n'855':'cp855',\n'csibm855':'cp855',\n'ibm855':'cp855',\n\n\n'857':'cp857',\n'csibm857':'cp857',\n'ibm857':'cp857',\n\n\n'858':'cp858',\n'csibm858':'cp858',\n'ibm858':'cp858',\n\n\n'860':'cp860',\n'csibm860':'cp860',\n'ibm860':'cp860',\n\n\n'861':'cp861',\n'cp_is':'cp861',\n'csibm861':'cp861',\n'ibm861':'cp861',\n\n\n'862':'cp862',\n'cspc862latinhebrew':'cp862',\n'ibm862':'cp862',\n\n\n'863':'cp863',\n'csibm863':'cp863',\n'ibm863':'cp863',\n\n\n'864':'cp864',\n'csibm864':'cp864',\n'ibm864':'cp864',\n\n\n'865':'cp865',\n'csibm865':'cp865',\n'ibm865':'cp865',\n\n\n'866':'cp866',\n'csibm866':'cp866',\n'ibm866':'cp866',\n\n\n'869':'cp869',\n'cp_gr':'cp869',\n'csibm869':'cp869',\n'ibm869':'cp869',\n\n\n'932':'cp932',\n'ms932':'cp932',\n'mskanji':'cp932',\n'ms_kanji':'cp932',\n\n\n'949':'cp949',\n'ms949':'cp949',\n'uhc':'cp949',\n\n\n'950':'cp950',\n'ms950':'cp950',\n\n\n'jisx0213':'euc_jis_2004',\n'eucjis2004':'euc_jis_2004',\n'euc_jis2004':'euc_jis_2004',\n\n\n'eucjisx0213':'euc_jisx0213',\n\n\n'eucjp':'euc_jp',\n'ujis':'euc_jp',\n'u_jis':'euc_jp',\n\n\n'euckr':'euc_kr',\n'korean':'euc_kr',\n'ksc5601':'euc_kr',\n'ks_c_5601':'euc_kr',\n'ks_c_5601_1987':'euc_kr',\n'ksx1001':'euc_kr',\n'ks_x_1001':'euc_kr',\n\n\n'gb18030_2000':'gb18030',\n\n\n'chinese':'gb2312',\n'csiso58gb231280':'gb2312',\n'euc_cn':'gb2312',\n'euccn':'gb2312',\n'eucgb2312_cn':'gb2312',\n'gb2312_1980':'gb2312',\n'gb2312_80':'gb2312',\n'iso_ir_58':'gb2312',\n\n\n'936':'gbk',\n'cp936':'gbk',\n'ms936':'gbk',\n\n\n'hex':'hex_codec',\n\n\n'roman8':'hp_roman8',\n'r8':'hp_roman8',\n'csHPRoman8':'hp_roman8',\n\n\n'hzgb':'hz',\n'hz_gb':'hz',\n'hz_gb_2312':'hz',\n\n\n'csiso2022jp':'iso2022_jp',\n'iso2022jp':'iso2022_jp',\n'iso_2022_jp':'iso2022_jp',\n\n\n'i
so2022jp_1':'iso2022_jp_1',\n'iso_2022_jp_1':'iso2022_jp_1',\n\n\n'iso2022jp_2':'iso2022_jp_2',\n'iso_2022_jp_2':'iso2022_jp_2',\n\n\n'iso_2022_jp_2004':'iso2022_jp_2004',\n'iso2022jp_2004':'iso2022_jp_2004',\n\n\n'iso2022jp_3':'iso2022_jp_3',\n'iso_2022_jp_3':'iso2022_jp_3',\n\n\n'iso2022jp_ext':'iso2022_jp_ext',\n'iso_2022_jp_ext':'iso2022_jp_ext',\n\n\n'csiso2022kr':'iso2022_kr',\n'iso2022kr':'iso2022_kr',\n'iso_2022_kr':'iso2022_kr',\n\n\n'csisolatin6':'iso8859_10',\n'iso_8859_10':'iso8859_10',\n'iso_8859_10_1992':'iso8859_10',\n'iso_ir_157':'iso8859_10',\n'l6':'iso8859_10',\n'latin6':'iso8859_10',\n\n\n'thai':'iso8859_11',\n'iso_8859_11':'iso8859_11',\n'iso_8859_11_2001':'iso8859_11',\n\n\n'iso_8859_13':'iso8859_13',\n'l7':'iso8859_13',\n'latin7':'iso8859_13',\n\n\n'iso_8859_14':'iso8859_14',\n'iso_8859_14_1998':'iso8859_14',\n'iso_celtic':'iso8859_14',\n'iso_ir_199':'iso8859_14',\n'l8':'iso8859_14',\n'latin8':'iso8859_14',\n\n\n'iso_8859_15':'iso8859_15',\n'l9':'iso8859_15',\n'latin9':'iso8859_15',\n\n\n'iso_8859_16':'iso8859_16',\n'iso_8859_16_2001':'iso8859_16',\n'iso_ir_226':'iso8859_16',\n'l10':'iso8859_16',\n'latin10':'iso8859_16',\n\n\n'csisolatin2':'iso8859_2',\n'iso_8859_2':'iso8859_2',\n'iso_8859_2_1987':'iso8859_2',\n'iso_ir_101':'iso8859_2',\n'l2':'iso8859_2',\n'latin2':'iso8859_2',\n\n\n'csisolatin3':'iso8859_3',\n'iso_8859_3':'iso8859_3',\n'iso_8859_3_1988':'iso8859_3',\n'iso_ir_109':'iso8859_3',\n'l3':'iso8859_3',\n'latin3':'iso8859_3',\n\n\n'csisolatin4':'iso8859_4',\n'iso_8859_4':'iso8859_4',\n'iso_8859_4_1988':'iso8859_4',\n'iso_ir_110':'iso8859_4',\n'l4':'iso8859_4',\n'latin4':'iso8859_4',\n\n\n'csisolatincyrillic':'iso8859_5',\n'cyrillic':'iso8859_5',\n'iso_8859_5':'iso8859_5',\n'iso_8859_5_1988':'iso8859_5',\n'iso_ir_144':'iso8859_5',\n\n\n'arabic':'iso8859_6',\n'asmo_708':'iso8859_6',\n'csisolatinarabic':'iso8859_6',\n'ecma_114':'iso8859_6',\n'iso_8859_6':'iso8859_6',\n'iso_8859_6_1987':'iso8859_6',\n'iso_ir_127':'iso8859_6',\n\n\n'csisolatingreek':'iso8859_7',\n'ecma_118':'iso8859_7',\n'elot_928':'iso8859_7',\n'greek':'iso8859_7',\n'greek8':'iso8859_7',\n'iso_8859_7':'iso8859_7',\n'iso_8859_7_1987':'iso8859_7',\n'iso_ir_126':'iso8859_7',\n\n\n'csisolatinhebrew':'iso8859_8',\n'hebrew':'iso8859_8',\n'iso_8859_8':'iso8859_8',\n'iso_8859_8_1988':'iso8859_8',\n'iso_ir_138':'iso8859_8',\n\n\n'csisolatin5':'iso8859_9',\n'iso_8859_9':'iso8859_9',\n'iso_8859_9_1989':'iso8859_9',\n'iso_ir_148':'iso8859_9',\n'l5':'iso8859_9',\n'latin5':'iso8859_9',\n\n\n'cp1361':'johab',\n'ms1361':'johab',\n\n\n'cskoi8r':'koi8_r',\n\n\n\n\n\n\n\n\n'8859':'latin_1',\n'cp819':'latin_1',\n'csisolatin1':'latin_1',\n'ibm819':'latin_1',\n'iso8859':'latin_1',\n'iso8859_1':'latin_1',\n'iso_8859_1':'latin_1',\n'iso_8859_1_1987':'latin_1',\n'iso_ir_100':'latin_1',\n'l1':'latin_1',\n'latin':'latin_1',\n'latin1':'latin_1',\n\n\n'maccyrillic':'mac_cyrillic',\n\n\n'macgreek':'mac_greek',\n\n\n'maciceland':'mac_iceland',\n\n\n'maccentraleurope':'mac_latin2',\n'maclatin2':'mac_latin2',\n\n\n'macintosh':'mac_roman',\n'macroman':'mac_roman',\n\n\n'macturkish':'mac_turkish',\n\n\n'dbcs':'mbcs',\n\n\n'csptcp154':'ptcp154',\n'pt154':'ptcp154',\n'cp154':'ptcp154',\n'cyrillic_asian':'ptcp154',\n\n\n'quopri':'quopri_codec',\n'quoted_printable':'quopri_codec',\n'quotedprintable':'quopri_codec',\n\n\n'rot13':'rot_13',\n\n\n'csshiftjis':'shift_jis',\n'shiftjis':'shift_jis',\n'sjis':'shift_jis',\n's_jis':'shift_jis',\n\n\n'shiftjis2004':'shift_jis_2004',\n'sjis_2004':'shift_jis_2004',\n's_jis_2004':'shift_jis_2004',\n
\n\n'shiftjisx0213':'shift_jisx0213',\n'sjisx0213':'shift_jisx0213',\n's_jisx0213':'shift_jisx0213',\n\n\n'tis260':'tactis',\n\n\n'tis620':'tis_620',\n'tis_620_0':'tis_620',\n'tis_620_2529_0':'tis_620',\n'tis_620_2529_1':'tis_620',\n'iso_ir_166':'tis_620',\n\n\n'u16':'utf_16',\n'utf16':'utf_16',\n\n\n'unicodebigunmarked':'utf_16_be',\n'utf_16be':'utf_16_be',\n\n\n'unicodelittleunmarked':'utf_16_le',\n'utf_16le':'utf_16_le',\n\n\n'u32':'utf_32',\n'utf32':'utf_32',\n\n\n'utf_32be':'utf_32_be',\n\n\n'utf_32le':'utf_32_le',\n\n\n'u7':'utf_7',\n'utf7':'utf_7',\n'unicode_1_1_utf_7':'utf_7',\n\n\n'u8':'utf_8',\n'utf':'utf_8',\n'utf8':'utf_8',\n'utf8_ucs2':'utf_8',\n'utf8_ucs4':'utf_8',\n\n\n'uu':'uu_codec',\n\n\n'zip':'zlib_codec',\n'zlib':'zlib_codec',\n\n\n'x_mac_japanese':'shift_jis',\n'x_mac_korean':'euc_kr',\n'x_mac_simp_chinese':'gb2312',\n'x_mac_trad_chinese':'big5',\n}\n"], "encodings.mac_croatian": [".py", "''\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_table)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False ):\n return codecs.charmap_encode(input,self.errors,encoding_table)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False ):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='mac-croatian',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n \n \ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' 
'\n'!'\n'\"'\n'#'\n'$'\n'%'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\xc4'\n'\\xc5'\n'\\xc7'\n'\\xc9'\n'\\xd1'\n'\\xd6'\n'\\xdc'\n'\\xe1'\n'\\xe0'\n'\\xe2'\n'\\xe4'\n'\\xe3'\n'\\xe5'\n'\\xe7'\n'\\xe9'\n'\\xe8'\n'\\xea'\n'\\xeb'\n'\\xed'\n'\\xec'\n'\\xee'\n'\\xef'\n'\\xf1'\n'\\xf3'\n'\\xf2'\n'\\xf4'\n'\\xf6'\n'\\xf5'\n'\\xfa'\n'\\xf9'\n'\\xfb'\n'\\xfc'\n'\\u2020'\n'\\xb0'\n'\\xa2'\n'\\xa3'\n'\\xa7'\n'\\u2022'\n'\\xb6'\n'\\xdf'\n'\\xae'\n'\\u0160'\n'\\u2122'\n'\\xb4'\n'\\xa8'\n'\\u2260'\n'\\u017d'\n'\\xd8'\n'\\u221e'\n'\\xb1'\n'\\u2264'\n'\\u2265'\n'\\u2206'\n'\\xb5'\n'\\u2202'\n'\\u2211'\n'\\u220f'\n'\\u0161'\n'\\u222b'\n'\\xaa'\n'\\xba'\n'\\u03a9'\n'\\u017e'\n'\\xf8'\n'\\xbf'\n'\\xa1'\n'\\xac'\n'\\u221a'\n'\\u0192'\n'\\u2248'\n'\\u0106'\n'\\xab'\n'\\u010c'\n'\\u2026'\n'\\xa0'\n'\\xc0'\n'\\xc3'\n'\\xd5'\n'\\u0152'\n'\\u0153'\n'\\u0110'\n'\\u2014'\n'\\u201c'\n'\\u201d'\n'\\u2018'\n'\\u2019'\n'\\xf7'\n'\\u25ca'\n'\\uf8ff'\n'\\xa9'\n'\\u2044'\n'\\u20ac'\n'\\u2039'\n'\\u203a'\n'\\xc6'\n'\\xbb'\n'\\u2013'\n'\\xb7'\n'\\u201a'\n'\\u201e'\n'\\u2030'\n'\\xc2'\n'\\u0107'\n'\\xc1'\n'\\u010d'\n'\\xc8'\n'\\xcd'\n'\\xce'\n'\\xcf'\n'\\xcc'\n'\\xd3'\n'\\xd4'\n'\\u0111'\n'\\xd2'\n'\\xda'\n'\\xdb'\n'\\xd9'\n'\\u0131'\n'\\u02c6'\n'\\u02dc'\n'\\xaf'\n'\\u03c0'\n'\\xcb'\n'\\u02da'\n'\\xb8'\n'\\xca'\n'\\xe6'\n'\\u02c7'\n)\n\n\nencoding_table=codecs.charmap_build(decoding_table)\n"], "copyreg": [".py", "''\n\n\n\n\n\n__all__=[\"pickle\",\"constructor\",\n\"add_extension\",\"remove_extension\",\"clear_extension_cache\"]\n\ndispatch_table={}\n\ndef pickle(ob_type,pickle_function,constructor_ob=None ):\n if not callable(pickle_function):\n raise TypeError(\"reduction functions must be callable\")\n dispatch_table[ob_type]=pickle_function\n \n \n \n if constructor_ob is not None :\n constructor(constructor_ob)\n \ndef constructor(object):\n if not callable(object):\n raise TypeError(\"constructors must be callable\")\n \n \n \ntry :\n complex\nexcept NameError:\n pass\nelse :\n\n def pickle_complex(c):\n return complex,(c.real,c.imag)\n \n pickle(complex,pickle_complex,complex)\n \n \n \ndef _reconstructor(cls,base,state):\n if base is object:\n obj=object.__new__(cls)\n else :\n obj=base.__new__(cls,state)\n if base.__init__ !=object.__init__:\n base.__init__(obj,state)\n return obj\n \n_HEAPTYPE=1 <<9\n\n\n\ndef _reduce_ex(self,proto):\n assert proto <2\n for base in self.__class__.__mro__:\n if hasattr(base,'__flags__')and not base.__flags__&_HEAPTYPE:\n break\n else :\n base=object\n if base is object:\n state=None\n else :\n if base is self.__class__:\n raise TypeError(\"can't pickle %s objects\"%base.__name__)\n state=base(self)\n args=(self.__class__,base,state)\n try :\n getstate=self.__getstate__\n except AttributeError:\n if getattr(self,\"__slots__\",None ):\n raise TypeError(\"a class that defines __slots__ without \"\n \"defining __getstate__ cannot be pickled\")\n try :\n dict=self.__dict__\n except AttributeError:\n dict=None\n else :\n dict=getstate()\n if dict:\n return _reconstructor,args,dict\n else :\n return _reconstructor,args\n \n \n \ndef __newobj__(cls,*args):\n return 
cls.__new__(cls,*args)\n \ndef _slotnames(cls):\n ''\n\n\n\n\n\n\n\n \n \n \n names=cls.__dict__.get(\"__slotnames__\")\n if names is not None :\n return names\n \n \n names=[]\n if not hasattr(cls,\"__slots__\"):\n \n pass\n else :\n \n for c in cls.__mro__:\n if\"__slots__\"in c.__dict__:\n slots=c.__dict__['__slots__']\n \n if isinstance(slots,str):\n slots=(slots,)\n for name in slots:\n \n if name in (\"__dict__\",\"__weakref__\"):\n continue\n \n elif name.startswith('__')and not name.endswith('__'):\n names.append('_%s%s'%(c.__name__,name))\n else :\n names.append(name)\n \n \n try :\n cls.__slotnames__=names\n except :\n pass\n \n return names\n \n \n \n \n \n \n \n \n \n \n_extension_registry={}\n_inverted_registry={}\n_extension_cache={}\n\n\n\ndef add_extension(module,name,code):\n ''\n code=int(code)\n if not 1 <=code <=0x7fffffff:\n raise ValueError(\"code out of range\")\n key=(module,name)\n if (_extension_registry.get(key)==code and\n _inverted_registry.get(code)==key):\n return\n if key in _extension_registry:\n raise ValueError(\"key %s is already registered with code %s\"%\n (key,_extension_registry[key]))\n if code in _inverted_registry:\n raise ValueError(\"code %s is already in use for key %s\"%\n (code,_inverted_registry[code]))\n _extension_registry[key]=code\n _inverted_registry[code]=key\n \ndef remove_extension(module,name,code):\n ''\n key=(module,name)\n if (_extension_registry.get(key)!=code or\n _inverted_registry.get(code)!=key):\n raise ValueError(\"key %s is not registered with code %s\"%\n (key,code))\n del _extension_registry[key]\n del _inverted_registry[code]\n if code in _extension_cache:\n del _extension_cache[code]\n \ndef clear_extension_cache():\n _extension_cache.clear()\n \n \n \n \n \n \n \n \n \n \n \n \n \n"], "_jsre": [".js", "var $module=(function($B){\n\n var _b_ = $B.builtins\n var $s=[]\n for(var $b in _b_) $s.push('var ' + $b +'=_b_[\"'+$b+'\"]')\n eval($s.join(';'))\n\n var JSObject = $B.JSObject\n\n var obj = {__class__:$module,\n __str__: function(){return \"\"}\n }\n obj.A = obj.ASCII = 256\n obj.I = obj.IGNORECASE = 2 // 'i'\n obj.L = obj.LOCALE = 4\n obj.M = obj.MULTILINE = 8 // 'm'\n obj.S = obj.DOTALL = 16\n obj.U = obj.UNICODE = 32\n obj.X = obj.VERBOSE = 64\n obj._is_valid = function(pattern) {\n if ($B.$options.re=='pyre') return false //force use of python's re module\n if ($B.$options.re=='jsre') return true //force use of brythons re module\n // FIXME: Improve\n\n if (!isinstance(pattern, str)) {\n // this is probably a SRE_PATTERN, so return false, and let\n // python's re module handle this.\n return false\n }\n var is_valid = false;\n try {\n new RegExp(pattern);\n is_valid = true;\n }\n catch(e) {}\n if (!is_valid) return false //if js won't parse the pattern return false\n\n // using reference http://www.regular-expressions.info/\n // to compare python re and javascript regex libraries\n\n // look for things javascript does not support\n // check for name capturing group\n var mylist=['?P=', '?P<', '(?#', '(?<=', '(? 
-1) return false\n }\n\n var re_list=['\\{,\\d+\\}']\n for(var i=0, _len_i = re_list.length; i < _len_i; i++) {\n var _re=new RegExp(re_list[i])\n if (_re.test(pattern)) return false\n }\n\n // it looks like the pattern has passed all our tests so lets assume\n // javascript can handle this pattern.\n return true\n }\n var $SRE_PatternDict = {\n __class__:$B.$type,\n __name__:'SRE_Pattern'\n }\n $SRE_PatternDict.__mro__ = [object.$dict]\n $SRE_PatternDict.findall = function(self,string){\n return obj.findall(self.pattern,string,self.flags)\n }\n $SRE_PatternDict.finditer = function(self,string){\n return obj.finditer(self.pattern,string,self.flags)\n }\n $SRE_PatternDict.match = function(self,string){\n return obj.match(self.pattern,string,self.flags)\n }\n $SRE_PatternDict.search = function(self,string){\n return obj.search(self.pattern,string,self.flags)\n }\n function normflags(flags) {\n return ((flags & obj.I)? 'i' : '') + ((flags & obj.M)? 'm' : '');\n }\n obj.compile = function(pattern,flags){\n return {\n __class__:$SRE_PatternDict,\n pattern:pattern,\n flags:normflags(flags)\n }\n }\n obj.escape = function(string){\n // Escape all the characters in pattern except ASCII letters, numbers \n // and '_'. This is useful if you want to match an arbitrary literal \n // string that may have regular expression metacharacters in it.\n var res = ''\n var ok = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_'\n for(var i=0, _len_i = string.length; i < _len_i;i++){\n if(ok.search(string.charAt(i))>-1){res += string.charAt(i)}\n }\n return res\n }\n obj.findall = function(pattern,string,flags){\n var $ns=$B.args('re.findall',2,\n {pattern:null, string:null}, ['pattern', 'string'],\n arguments,{},'args','kw') ,\n args = $ns['args'] ,\n _flags = 0;\n if(args.length>0){var flags=args[0]}\n else{var _flags = getattr($ns['kw'], 'get')('flags',0)}\n \n var flags = normflags();\n flags += 'gm'\n var jsp = new RegExp(pattern,flags) ,\n jsmatch = string.match(jsp);\n if(jsmatch===null){return []}\n return jsmatch\n }\n obj.finditer = function(pattern,string,flags){\n var $ns=$B.args('re.finditer',2,\n {pattern:null, sring:null}, ['pattern','string'],\n arguments,{},'args','kw'),\n args = $ns['args'],\n _flags = 0;\n if(args.length>0){var flags=args[0]}\n else{var _flags = getattr($ns['kw'], 'get')('flags',0)}\n \n var flags = normflags();\n flags += 'gm'\n var jsp = new RegExp(pattern,flags),\n jsmatch = string.match(jsp);\n if(jsmatch===null){return []}\n \n var _list=[]\n for (var j=0, _len_j = jsmatch.length; j < _len_j; j++) {\n var mo = {}\n mo._match=jsmatch[j]\n mo.group = function(){\n var res = []\n for(var i=0, _len_i = arguments.length; i < _len_i;i++){\n if(jsmatch[arguments[i]]===undefined){res.push(None)}\n else{res.push(jsmatch[arguments[i]])}\n }\n if(arguments.length===1){return res[0]}\n return tuple(res)\n }\n mo.groups = function(_default){\n if(_default===undefined){_default=None}\n var res = []\n for(var i=1, _len_i = jsmatch.length; i < _len_i;i++){\n if(jsmatch[i]===undefined){res.push(_default)}\n else{res.push(jsmatch[i])}\n }\n return tuple(res)\n }\n mo.start = function(){return mo._match.index}\n mo.end = function(){return mo._match.length-mo._match.index}\n mo.string = string\n _list.push(JSObject(mo))\n }\n return _list\n }\n obj.search = function(pattern,string){\n var $ns=$B.args('re.search', 2,\n {pattern:null, string:null},['pattern','string'],\n arguments,{},'args','kw')\n var args = $ns['args']\n if(args.length>0){var flags=args[0]}\n else{var flags = 
getattr($ns['kw'],'get')('flags','')}\n flags = normflags(flags);\n var jsp = new RegExp(pattern,flags)\n var jsmatch = string.match(jsp)\n if(jsmatch===null){return None}\n var mo = new Object()\n mo.group = function(){\n var res = []\n for(var i=0, _len_i = arguments.length; i < _len_i;i++){\n if(jsmatch[arguments[i]]===undefined){res.push(None)}\n else{res.push(jsmatch[arguments[i]])}\n }\n if(arguments.length===1){return res[0]}\n return tuple(res)\n }\n mo.groups = function(_default){\n if(_default===undefined){_default=None}\n var res = []\n for(var i=1, _len_i = jsmatch.length; i < _len_i;i++){\n if(jsmatch[i]===undefined){res.push(_default)}\n else{res.push(jsmatch[i])}\n }\n return tuple(res)\n }\n mo.start = function(){return jsmatch.index}\n mo.end = function(){return jsmatch.length-jsmatch.index}\n mo.string = string\n return JSObject(mo)\n }\n obj.sub = function(pattern,repl,string){\n var $ns=$B.args('re.search', 3,\n {pattern:null, repl:null, string:null}, \n ['pattern','repl','string'],\n arguments,{},'args','kw')\n for($var in $ns){eval(\"var \"+$var+\"=$ns[$var]\")}\n var args = $ns['args']\n var count = _b_.dict.$dict.get($ns['kw'],'count',0)\n var flags = _b_.dict.$dict.get($ns['kw'],'flags','')\n if(args.length>0){var count=args[0]}\n if(args.length>1){var flags=args[1]}\n flags = normflags(flags);\n if(typeof repl===\"string\"){\n // backreferences are \\1, \\2... in Python but $1,$2... in Javascript\n repl = repl.replace(/\\\\(\\d+)/g,'$$$1')\n }else if(typeof repl===\"function\"){\n // the argument passed to the Python function is the match object\n // the arguments passed to the Javascript function are :\n // - the matched substring\n // - the matched groups\n // - the offset of the matched substring inside the string\n // - the string being examined\n var $repl1 = function(){\n var mo = Object()\n mo.string = arguments[arguments.length-1]\n var start = arguments[arguments.length-2]\n var end = start + arguments[0].length\n mo.start = function(){return start}\n mo.end = function(){return end}\n groups = []\n for(var i=1, _len_i = arguments.length-2; i < _len_i;i++){groups.push(arguments[i])}\n mo.groups = function(_default){\n if(_default===undefined){_default=None}\n var res = []\n for(var i=0, _len_i = groups.length; i < _len_i;i++){\n if(groups[i]===undefined){res.push(_default)}\n else{res.push(groups[i])}\n }\n return res\n }\n return repl(JSObject(mo))\n }\n }\n if(count==0){flags+='g'}\n var jsp = new RegExp(pattern,flags)\n if(typeof repl==='function'){return string.replace(jsp,$repl1)}\n else{return string.replace(jsp,repl)}\n }\n obj.match = (function(search_func){\n return function(){\n // match is like search but pattern must start with ^\n var pattern = arguments[0]\n if(pattern.charAt(0)!=='^'){pattern = '^'+pattern}\n var args = [pattern]\n for(var i=1, _len_i = arguments.length; i < _len_i;i++){args.push(arguments[i])}\n return search_func.apply(null,args)\n }\n })(obj.search)\n\n return obj\n}\n)(__BRYTHON__)\n"], "signal": [".py", "''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\nCTRL_BREAK_EVENT=1\nCTRL_C_EVENT=0\nNSIG=23\nSIGABRT=22\nSIGBREAK=21\nSIGFPE=8\nSIGILL=4\nSIGINT=2\nSIGSEGV=11\nSIGTERM=15\nSIG_DFL=0\nSIG_IGN=1\n\ndef signal(signalnum,handler):\n pass\n"], "browser.session_storage": [".py", "\nimport sys\nfrom browser import window\nfrom .local_storage import LocalStorage\n\nhas_session_storage=hasattr(window,'sessionStorage')\n\nclass SessionStorage(LocalStorage):\n\n storage_type=\"session_storage\"\n \n def 
__init__(self):\n if not has_session_storage:\n raise EnvironmentError(\"SessionStorage not available\")\n self.store=window.sessionStorage\n \nif has_session_storage:\n storage=SessionStorage()\n"], "_profile": [".js", "// Private interface to the profiling instrumentation implemented in py_utils.js.\n// Uses local a copy of the eval function from py_builtin_functions.js\n\nvar $module=(function($B) {\n eval($B.InjectBuiltins());\n return {\n brython:$B,\n data:$B.$profile_data,\n start:$B.$profile.start,\n stop:$B.$profile.stop,\n pause:$B.$profile.pause,\n status:$B.$profile.status,\n clear:$B.$profile.clear,\n run:function(src,_globals,_locals,nruns) {\n var current_frame = $B.frames_stack[$B.frames_stack.length-1]\n if(current_frame!==undefined){\n var current_locals_id = current_frame[0].replace(/\\./,'_'),\n current_globals_id = current_frame[2].replace(/\\./,'_')\n }\n\n var is_exec = true, leave = false\n\n if(src.__class__===$B.$CodeObjectDict){\n src = src.source\n }\n\n // code will be run in a specific block\n var globals_id = '$profile_'+$B.UUID(),\n locals_id,\n parent_block_id\n if(_locals===_globals || _locals===undefined){\n locals_id = globals_id\n }else{\n locals_id = '$profile_'+$B.UUID()\n }\n // Initialise the object for block namespaces\n eval('var $locals_'+globals_id+' = {}\\nvar $locals_'+locals_id+' = {}')\n\n // Initialise block globals\n if(_globals===undefined){\n var gobj = current_frame[3],\n ex = ''\n for(var attr in current_frame[3]){\n ex == '$locals_'+globals_id+'[\"'+attr+\n '\"] = gobj[\"'+attr+'\"]';\n }\n parent_block_id = current_globals_id\n ex += 'var $locals_'+current_globals_id+'=gobj;'\n eval(ex)\n }else{\n $B.bound[globals_id] = {}\n var items = _b_.dict.$dict.items(_globals), item\n while(1){\n try{\n var item = _b_.next(items)\n eval('$locals_'+globals_id+'[\"'+item[0]+'\"] = item[1]')\n $B.bound[globals_id][item[0]]=true\n }catch(err){\n break\n }\n }\n parent_block_id = '__builtins__'\n }\n\n // Initialise block locals\n if(_locals===undefined){\n if(_globals!==undefined){\n eval('var $locals_'+locals_id+' = $locals_'+globals_id)\n }else{\n var lobj = current_frame[1],\n ex = ''\n for(var attr in current_frame[1]){\n ex += '$locals_'+locals_id+'[\"'+attr+\n '\"] = current_frame[1][\"'+attr+'\"];'\n }\n eval(ex)\n }\n }else{\n var items = _b_.dict.$dict.items(_locals), item\n while(1){\n try{\n var item = _b_.next(items)\n eval('$locals_'+locals_id+'[\"'+item[0]+'\"] = item[1]')\n }catch(err){\n break\n }\n }\n }\n //var nb_modules = Object.keys(__BRYTHON__.modules).length\n //console.log('before exec', nb_modules)\n\n var root = $B.py2js(src, globals_id, locals_id, parent_block_id),\n leave_frame = true\n\n try{\n\n var js = root.to_js()\n\n if ($B.async_enabled) js=$B.execution_object.source_conversion(js)\n\n var i,res,gns;\n for(i=0;i','>=','in','not in','is',\n'is not','exception match','BAD')\n\nhasconst=[]\nhasname=[]\nhasjrel=[]\nhasjabs=[]\nhaslocal=[]\nhascompare=[]\nhasfree=[]\nhasnargs=[]\n\nopmap={}\nopname=['']*256\nfor op in range(256):opname[op]='<%r>'%(op,)\ndel op\n\ndef def_op(name,op):\n opname[op]=name\n opmap[name]=op\n \ndef name_op(name,op):\n def_op(name,op)\n hasname.append(op)\n \ndef jrel_op(name,op):\n def_op(name,op)\n hasjrel.append(op)\n \ndef jabs_op(name,op):\n def_op(name,op)\n hasjabs.append(op)\n \n \n \n 
\ndef_op('POP_TOP',1)\ndef_op('ROT_TWO',2)\ndef_op('ROT_THREE',3)\ndef_op('DUP_TOP',4)\ndef_op('DUP_TOP_TWO',5)\n\ndef_op('NOP',9)\ndef_op('UNARY_POSITIVE',10)\ndef_op('UNARY_NEGATIVE',11)\ndef_op('UNARY_NOT',12)\n\ndef_op('UNARY_INVERT',15)\n\ndef_op('BINARY_POWER',19)\ndef_op('BINARY_MULTIPLY',20)\n\ndef_op('BINARY_MODULO',22)\ndef_op('BINARY_ADD',23)\ndef_op('BINARY_SUBTRACT',24)\ndef_op('BINARY_SUBSCR',25)\ndef_op('BINARY_FLOOR_DIVIDE',26)\ndef_op('BINARY_TRUE_DIVIDE',27)\ndef_op('INPLACE_FLOOR_DIVIDE',28)\ndef_op('INPLACE_TRUE_DIVIDE',29)\n\ndef_op('STORE_MAP',54)\ndef_op('INPLACE_ADD',55)\ndef_op('INPLACE_SUBTRACT',56)\ndef_op('INPLACE_MULTIPLY',57)\n\ndef_op('INPLACE_MODULO',59)\ndef_op('STORE_SUBSCR',60)\ndef_op('DELETE_SUBSCR',61)\ndef_op('BINARY_LSHIFT',62)\ndef_op('BINARY_RSHIFT',63)\ndef_op('BINARY_AND',64)\ndef_op('BINARY_XOR',65)\ndef_op('BINARY_OR',66)\ndef_op('INPLACE_POWER',67)\ndef_op('GET_ITER',68)\n\ndef_op('PRINT_EXPR',70)\ndef_op('LOAD_BUILD_CLASS',71)\ndef_op('YIELD_FROM',72)\n\ndef_op('INPLACE_LSHIFT',75)\ndef_op('INPLACE_RSHIFT',76)\ndef_op('INPLACE_AND',77)\ndef_op('INPLACE_XOR',78)\ndef_op('INPLACE_OR',79)\ndef_op('BREAK_LOOP',80)\ndef_op('WITH_CLEANUP',81)\n\ndef_op('RETURN_VALUE',83)\ndef_op('IMPORT_STAR',84)\n\ndef_op('YIELD_VALUE',86)\ndef_op('POP_BLOCK',87)\ndef_op('END_FINALLY',88)\ndef_op('POP_EXCEPT',89)\n\nHAVE_ARGUMENT=90\n\nname_op('STORE_NAME',90)\nname_op('DELETE_NAME',91)\ndef_op('UNPACK_SEQUENCE',92)\njrel_op('FOR_ITER',93)\ndef_op('UNPACK_EX',94)\nname_op('STORE_ATTR',95)\nname_op('DELETE_ATTR',96)\nname_op('STORE_GLOBAL',97)\nname_op('DELETE_GLOBAL',98)\ndef_op('LOAD_CONST',100)\nhasconst.append(100)\nname_op('LOAD_NAME',101)\ndef_op('BUILD_TUPLE',102)\ndef_op('BUILD_LIST',103)\ndef_op('BUILD_SET',104)\ndef_op('BUILD_MAP',105)\nname_op('LOAD_ATTR',106)\ndef_op('COMPARE_OP',107)\nhascompare.append(107)\nname_op('IMPORT_NAME',108)\nname_op('IMPORT_FROM',109)\n\njrel_op('JUMP_FORWARD',110)\njabs_op('JUMP_IF_FALSE_OR_POP',111)\njabs_op('JUMP_IF_TRUE_OR_POP',112)\njabs_op('JUMP_ABSOLUTE',113)\njabs_op('POP_JUMP_IF_FALSE',114)\njabs_op('POP_JUMP_IF_TRUE',115)\n\nname_op('LOAD_GLOBAL',116)\n\njabs_op('CONTINUE_LOOP',119)\njrel_op('SETUP_LOOP',120)\njrel_op('SETUP_EXCEPT',121)\njrel_op('SETUP_FINALLY',122)\n\ndef_op('LOAD_FAST',124)\nhaslocal.append(124)\ndef_op('STORE_FAST',125)\nhaslocal.append(125)\ndef_op('DELETE_FAST',126)\nhaslocal.append(126)\n\ndef_op('RAISE_VARARGS',130)\ndef_op('CALL_FUNCTION',131)\nhasnargs.append(131)\ndef_op('MAKE_FUNCTION',132)\ndef_op('BUILD_SLICE',133)\ndef_op('MAKE_CLOSURE',134)\ndef_op('LOAD_CLOSURE',135)\nhasfree.append(135)\ndef_op('LOAD_DEREF',136)\nhasfree.append(136)\ndef_op('STORE_DEREF',137)\nhasfree.append(137)\ndef_op('DELETE_DEREF',138)\nhasfree.append(138)\n\ndef_op('CALL_FUNCTION_VAR',140)\nhasnargs.append(140)\ndef_op('CALL_FUNCTION_KW',141)\nhasnargs.append(141)\ndef_op('CALL_FUNCTION_VAR_KW',142)\nhasnargs.append(142)\n\njrel_op('SETUP_WITH',143)\n\ndef_op('LIST_APPEND',145)\ndef_op('SET_ADD',146)\ndef_op('MAP_ADD',147)\n\ndef_op('LOAD_CLASSDEREF',148)\nhasfree.append(148)\n\ndef_op('EXTENDED_ARG',144)\nEXTENDED_ARG=144\n\ndel def_op,name_op,jrel_op,jabs_op\n"], "socket": [".py", "\n\n\n\"\"\"\\\nThis module provides socket operations and some related functions.\nOn Unix, it supports IP (Internet Protocol) and Unix domain sockets.\nOn other systems, it only supports IP. 
Functions specific for a\nsocket are available as methods of the socket object.\n\nFunctions:\n\nsocket() -- create a new socket object\nsocketpair() -- create a pair of new socket objects [*]\nfromfd() -- create a socket object from an open file descriptor [*]\nfromshare() -- create a socket object from data received from socket.share() [*]\ngethostname() -- return the current hostname\ngethostbyname() -- map a hostname to its IP number\ngethostbyaddr() -- map an IP number or hostname to DNS info\ngetservbyname() -- map a service name and a protocol name to a port number\ngetprotobyname() -- map a protocol name (e.g. 'tcp') to a number\nntohs(), ntohl() -- convert 16, 32 bit int from network to host byte order\nhtons(), htonl() -- convert 16, 32 bit int from host to network byte order\ninet_aton() -- convert IP addr string (123.45.67.89) to 32-bit packed format\ninet_ntoa() -- convert 32-bit packed format IP to string (123.45.67.89)\nsocket.getdefaulttimeout() -- get the default timeout value\nsocket.setdefaulttimeout() -- set the default timeout value\ncreate_connection() -- connects to an address, with an optional timeout and\n optional source address.\n\n [*] not available on all platforms!\n\nSpecial objects:\n\nSocketType -- type object for socket objects\nerror -- exception raised for I/O errors\nhas_ipv6 -- boolean value indicating if IPv6 is supported\n\nInteger constants:\n\nAF_INET, AF_UNIX -- socket domains (first argument to socket() call)\nSOCK_STREAM, SOCK_DGRAM, SOCK_RAW -- socket types (second argument)\n\nMany other constants may be defined; these may be used in calls to\nthe setsockopt() and getsockopt() methods.\n\"\"\"\n\nimport _socket\nfrom _socket import *\n\nimport os,sys,io\n\ntry :\n import errno\nexcept ImportError:\n errno=None\nEBADF=getattr(errno,'EBADF',9)\nEAGAIN=getattr(errno,'EAGAIN',11)\nEWOULDBLOCK=getattr(errno,'EWOULDBLOCK',11)\n\n__all__=[\"getfqdn\",\"create_connection\"]\n__all__.extend(os._get_exports_list(_socket))\n\n\n_realsocket=socket\n\n\nif sys.platform.lower().startswith(\"win\"):\n errorTab={}\n errorTab[10004]=\"The operation was interrupted.\"\n errorTab[10009]=\"A bad file handle was passed.\"\n errorTab[10013]=\"Permission denied.\"\n errorTab[10014]=\"A fault occurred on the network??\"\n errorTab[10022]=\"An invalid operation was attempted.\"\n errorTab[10035]=\"The socket operation would block\"\n errorTab[10036]=\"A blocking operation is already in progress.\"\n errorTab[10048]=\"The network address is in use.\"\n errorTab[10054]=\"The connection has been reset.\"\n errorTab[10058]=\"The network has been shut down.\"\n errorTab[10060]=\"The operation timed out.\"\n errorTab[10061]=\"Connection refused.\"\n errorTab[10063]=\"The name is too long.\"\n errorTab[10064]=\"The host is down.\"\n errorTab[10065]=\"The host is unreachable.\"\n __all__.append(\"errorTab\")\n \n \nclass socket(_socket.socket):\n\n ''\n \n __slots__=[\"__weakref__\",\"_io_refs\",\"_closed\"]\n \n def __init__(self,family=AF_INET,type=SOCK_STREAM,proto=0,fileno=None ):\n _socket.socket.__init__(self,family,type,proto,fileno)\n self._io_refs=0\n self._closed=False\n \n def __enter__(self):\n return self\n \n def __exit__(self,*args):\n if not self._closed:\n self.close()\n \n def __repr__(self):\n ''\n s=_socket.socket.__repr__(self)\n if s.startswith(\"0:\n self._io_refs -=1\n if self._closed:\n self.close()\n \n def _real_close(self,_ss=_socket.socket):\n \n _ss.close(self)\n \n def close(self):\n \n self._closed=True\n if self._io_refs <=0:\n 
self._real_close()\n \n def detach(self):\n ''\n\n\n\n\n \n self._closed=True\n return super().detach()\n \ndef fromfd(fd,family,type,proto=0):\n ''\n\n\n\n \n nfd=dup(fd)\n return socket(family,type,proto,nfd)\n \nif hasattr(_socket.socket,\"share\"):\n def fromshare(info):\n ''\n\n\n\n \n return socket(0,0,0,info)\n \nif hasattr(_socket,\"socketpair\"):\n\n def socketpair(family=None ,type=SOCK_STREAM,proto=0):\n ''\n\n\n\n\n\n \n if family is None :\n try :\n family=AF_UNIX\n except NameError:\n family=AF_INET\n a,b=_socket.socketpair(family,type,proto)\n a=socket(family,type,proto,a.detach())\n b=socket(family,type,proto,b.detach())\n return a,b\n \n \n_blocking_errnos={EAGAIN,EWOULDBLOCK}\n\nclass SocketIO(io.RawIOBase):\n\n ''\n\n\n\n \n \n \n \n \n \n \n \n \n \n \n def __init__(self,sock,mode):\n if mode not in (\"r\",\"w\",\"rw\",\"rb\",\"wb\",\"rwb\"):\n raise ValueError(\"invalid mode: %r\"%mode)\n io.RawIOBase.__init__(self)\n self._sock=sock\n if\"b\"not in mode:\n mode +=\"b\"\n self._mode=mode\n self._reading=\"r\"in mode\n self._writing=\"w\"in mode\n self._timeout_occurred=False\n \n def readinto(self,b):\n ''\n\n\n\n\n\n \n self._checkClosed()\n self._checkReadable()\n if self._timeout_occurred:\n raise IOError(\"cannot read from timed out object\")\n while True :\n try :\n return self._sock.recv_into(b)\n except timeout:\n self._timeout_occurred=True\n raise\n except InterruptedError:\n continue\n except error as e:\n if e.args[0]in _blocking_errnos:\n return None\n raise\n \n def write(self,b):\n ''\n\n\n\n \n self._checkClosed()\n self._checkWritable()\n try :\n return self._sock.send(b)\n except error as e:\n \n if e.args[0]in _blocking_errnos:\n return None\n raise\n \n def readable(self):\n ''\n \n if self.closed:\n raise ValueError(\"I/O operation on closed socket.\")\n return self._reading\n \n def writable(self):\n ''\n \n if self.closed:\n raise ValueError(\"I/O operation on closed socket.\")\n return self._writing\n \n def seekable(self):\n ''\n \n if self.closed:\n raise ValueError(\"I/O operation on closed socket.\")\n return super().seekable()\n \n def fileno(self):\n ''\n \n self._checkClosed()\n return self._sock.fileno()\n \n @property\n def name(self):\n if not self.closed:\n return self.fileno()\n else :\n return -1\n \n @property\n def mode(self):\n return self._mode\n \n def close(self):\n ''\n\n \n if self.closed:\n return\n io.RawIOBase.close(self)\n self._sock._decref_socketios()\n self._sock=None\n \n \ndef getfqdn(name=''):\n ''\n\n\n\n\n\n\n \n name=name.strip()\n if not name or name =='0.0.0.0':\n name=gethostname()\n try :\n hostname,aliases,ipaddrs=gethostbyaddr(name)\n except error:\n pass\n else :\n aliases.insert(0,hostname)\n for name in aliases:\n if'.'in name:\n break\n else :\n name=hostname\n return name\n \n \n_GLOBAL_DEFAULT_TIMEOUT=object()\n\ndef create_connection(address,timeout=_GLOBAL_DEFAULT_TIMEOUT,\nsource_address=None ):\n ''\n\n\n\n\n\n\n\n\n\n \n \n host,port=address\n err=None\n for res in getaddrinfo(host,port,0,SOCK_STREAM):\n af,socktype,proto,canonname,sa=res\n sock=None\n try :\n sock=socket(af,socktype,proto)\n if timeout is not _GLOBAL_DEFAULT_TIMEOUT:\n sock.settimeout(timeout)\n if source_address:\n sock.bind(source_address)\n sock.connect(sa)\n return sock\n \n except error as _:\n err=_\n if sock is not None :\n sock.close()\n \n if err is not None :\n raise err\n else :\n raise error(\"getaddrinfo returns an empty list\")\n"], "hashlib": [".js", "var $module=(function($B){\n\nvar _b_ = $B.builtins\n\nvar 
$s=[]\nfor(var $b in _b_) $s.push('var ' + $b +'=_b_[\"'+$b+'\"]')\neval($s.join(';'))\n\nvar $mod = {\n\n __getattr__ : function(attr){\n if (attr == 'new') return $hashlib_new;\n return this[attr]\n },\n md5: function(obj) {return $hashlib_new('md5', obj)},\n sha1: function(obj) {return $hashlib_new('sha1', obj)},\n sha224: function(obj) {return $hashlib_new('sha224', obj)},\n sha256: function(obj) {return $hashlib_new('sha256', obj)},\n sha384: function(obj) {return $hashlib_new('sha384', obj)},\n sha512: function(obj) {return $hashlib_new('sha512', obj)},\n\n algorithms_guaranteed: ['md5', 'sha1', 'sha224', 'sha256', 'sha384', 'sha512'],\n algorithms_available: ['md5', 'sha1', 'sha224', 'sha256', 'sha384', 'sha512']\n}\n\n\n//todo: eventually move this function to a \"utility\" file or use ajax module?\nfunction $get_CryptoJS_lib(alg) {\n var imp=$importer()\n var $xmlhttp=imp[0], fake_qs=imp[1], timer=imp[2], res=null\n\n $xmlhttp.onreadystatechange = function(){\n if($xmlhttp.readyState==4){\n window.clearTimeout(timer)\n if($xmlhttp.status==200 || $xmlhttp.status==0){res=$xmlhttp.responseText}\n else{\n // don't throw an exception here, it will not be caught (issue #30)\n res = Error()\n res.name = 'NotFoundError'\n res.message = \"No CryptoJS lib named '\"+alg+\"'\"\n }\n }\n }\n\n $xmlhttp.open('GET', $B.brython_path+'libs/crypto_js/rollups/'+alg+'.js'+fake_qs,false)\n if('overrideMimeType' in $xmlhttp){$xmlhttp.overrideMimeType(\"text/plain\")}\n $xmlhttp.send()\n if(res.constructor===Error){throw res} // module not found\n\n try{\n eval(res + \"; $B.CryptoJS=CryptoJS;\")\n } catch (err) { \n throw Error(\"JS Eval Error\", \"Cannot eval CryptoJS algorithm '\" + alg + \"' : error:\" + err);\n }\n}\n\nfunction bytes2WordArray(obj){\n // Transform a bytes object into an instance of class WordArray\n // defined in CryptoJS\n if(!_b_.isinstance(obj, _b_.bytes)){\n throw _b_.TypeError(\"expected bytes, got \"+\n $B.get_class(obj).__name__)\n }\n\n var words = []\n for(var i=0;i>>2]|=(g[k>>>2]>>>24-8*(k%4)&255)<<24-8*((e+k)%4);else if(65535>>2]=g[k>>>2];else b.push.apply(b,g);this.sigBytes+=a;return this},clamp:function(){var n=this.words,b=this.sigBytes;n[b>>>2]&=4294967295<<\n32-8*(b%4);n.length=a.ceil(b/4)},clone:function(){var a=m.clone.call(this);a.words=this.words.slice(0);return a},random:function(n){for(var b=[],g=0;g>>2]>>>24-8*(e%4)&255;g.push((k>>>4).toString(16));g.push((k&15).toString(16))}return g.join(\"\")},parse:function(a){for(var b=a.length,g=[],e=0;e>>3]|=parseInt(a.substr(e,\n2),16)<<24-4*(e%8);return new B.init(g,b/2)}},F=v.Latin1={stringify:function(a){var b=a.words;a=a.sigBytes;for(var g=[],e=0;e>>2]>>>24-8*(e%4)&255));return g.join(\"\")},parse:function(a){for(var b=a.length,g=[],e=0;e>>2]|=(a.charCodeAt(e)&255)<<24-8*(e%4);return new B.init(g,b)}},ha=v.Utf8={stringify:function(a){try{return decodeURIComponent(escape(F.stringify(a)))}catch(b){throw Error(\"Malformed UTF-8 data\");}},parse:function(a){return F.parse(unescape(encodeURIComponent(a)))}},\nZ=j.BufferedBlockAlgorithm=m.extend({reset:function(){this._data=new B.init;this._nDataBytes=0},_append:function(a){\"string\"==typeof a&&(a=ha.parse(a));this._data.concat(a);this._nDataBytes+=a.sigBytes},_process:function(n){var b=this._data,g=b.words,e=b.sigBytes,k=this.blockSize,m=e/(4*k),m=n?a.ceil(m):a.max((m|0)-this._minBufferSize,0);n=m*k;e=a.min(4*n,e);if(n){for(var c=0;cy;y++)v[y]=a();j=j.SHA512=d.extend({_doReset:function(){this._hash=new m.init([new f.init(1779033703,4089235720),new 
f.init(3144134277,2227873595),new f.init(1013904242,4271175723),new f.init(2773480762,1595750129),new f.init(1359893119,2917565137),new f.init(2600822924,725511199),new f.init(528734635,4215389547),new f.init(1541459225,327033209)])},_doProcessBlock:function(a,c){for(var d=this._hash.words,\nf=d[0],j=d[1],b=d[2],g=d[3],e=d[4],k=d[5],m=d[6],d=d[7],y=f.high,M=f.low,$=j.high,N=j.low,aa=b.high,O=b.low,ba=g.high,P=g.low,ca=e.high,Q=e.low,da=k.high,R=k.low,ea=m.high,S=m.low,fa=d.high,T=d.low,s=y,p=M,G=$,D=N,H=aa,E=O,W=ba,I=P,t=ca,q=Q,U=da,J=R,V=ea,K=S,X=fa,L=T,u=0;80>u;u++){var z=v[u];if(16>u)var r=z.high=a[c+2*u]|0,h=z.low=a[c+2*u+1]|0;else{var r=v[u-15],h=r.high,w=r.low,r=(h>>>1|w<<31)^(h>>>8|w<<24)^h>>>7,w=(w>>>1|h<<31)^(w>>>8|h<<24)^(w>>>7|h<<25),C=v[u-2],h=C.high,l=C.low,C=(h>>>19|l<<\n13)^(h<<3|l>>>29)^h>>>6,l=(l>>>19|h<<13)^(l<<3|h>>>29)^(l>>>6|h<<26),h=v[u-7],Y=h.high,A=v[u-16],x=A.high,A=A.low,h=w+h.low,r=r+Y+(h>>>0>>0?1:0),h=h+l,r=r+C+(h>>>0>>0?1:0),h=h+A,r=r+x+(h>>>0>>0?1:0);z.high=r;z.low=h}var Y=t&U^~t&V,A=q&J^~q&K,z=s&G^s&H^G&H,ja=p&D^p&E^D&E,w=(s>>>28|p<<4)^(s<<30|p>>>2)^(s<<25|p>>>7),C=(p>>>28|s<<4)^(p<<30|s>>>2)^(p<<25|s>>>7),l=B[u],ka=l.high,ga=l.low,l=L+((q>>>14|t<<18)^(q>>>18|t<<14)^(q<<23|t>>>9)),x=X+((t>>>14|q<<18)^(t>>>18|q<<14)^(t<<23|q>>>9))+(l>>>0<\nL>>>0?1:0),l=l+A,x=x+Y+(l>>>0>>0?1:0),l=l+ga,x=x+ka+(l>>>0>>0?1:0),l=l+h,x=x+r+(l>>>0>>0?1:0),h=C+ja,z=w+z+(h>>>0>>0?1:0),X=V,L=K,V=U,K=J,U=t,J=q,q=I+l|0,t=W+x+(q>>>0>>0?1:0)|0,W=H,I=E,H=G,E=D,G=s,D=p,p=l+h|0,s=x+z+(p>>>0>>0?1:0)|0}M=f.low=M+p;f.high=y+s+(M>>>0
    >>0?1:0);N=j.low=N+D;j.high=$+G+(N>>>0>>0?1:0);O=b.low=O+E;b.high=aa+H+(O>>>0>>0?1:0);P=g.low=P+I;g.high=ba+W+(P>>>0>>0?1:0);Q=e.low=Q+q;e.high=ca+t+(Q>>>0>>0?1:0);R=k.low=R+J;k.high=da+U+(R>>>0>>0?1:0);\nS=m.low=S+K;m.high=ea+V+(S>>>0>>0?1:0);T=d.low=T+L;d.high=fa+X+(T>>>0>>0?1:0)},_doFinalize:function(){var a=this._data,c=a.words,d=8*this._nDataBytes,f=8*a.sigBytes;c[f>>>5]|=128<<24-f%32;c[(f+128>>>10<<5)+30]=Math.floor(d/4294967296);c[(f+128>>>10<<5)+31]=d;a.sigBytes=4*c.length;this._process();return this._hash.toX32()},clone:function(){var a=d.clone.call(this);a._hash=this._hash.clone();return a},blockSize:32});c.SHA512=d._createHelper(j);c.HmacSHA512=d._createHmacHelper(j)})();\n(function(){var a=CryptoJS,c=a.x64,d=c.Word,j=c.WordArray,c=a.algo,f=c.SHA512,c=c.SHA384=f.extend({_doReset:function(){this._hash=new j.init([new d.init(3418070365,3238371032),new d.init(1654270250,914150663),new d.init(2438529370,812702999),new d.init(355462360,4144912697),new d.init(1731405415,4290775857),new d.init(2394180231,1750603025),new d.init(3675008525,1694076839),new d.init(1203062813,3204075428)])},_doFinalize:function(){var a=f._doFinalize.call(this);a.sigBytes-=16;return a}});a.SHA384=\nf._createHelper(c);a.HmacSHA384=f._createHmacHelper(c)})();\n"], "decimal": [".py", "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\"\"\"\nThis is an implementation of decimal floating point arithmetic based on\nthe General Decimal Arithmetic Specification:\n\n http://speleotrove.com/decimal/decarith.html\n\nand IEEE standard 854-1987:\n\n http://en.wikipedia.org/wiki/IEEE_854-1987\n\nDecimal floating point has finite precision with arbitrarily large bounds.\n\nThe purpose of this module is to support arithmetic using familiar\n\"schoolhouse\" rules and to avoid some of the tricky representation\nissues associated with binary floating point. 
The package is especially\nuseful for financial applications or for contexts where users have\nexpectations that are at odds with binary floating point (for instance,\nin binary floating point, 1.00 % 0.1 gives 0.09999999999999995 instead\nof 0.0; Decimal('1.00') % Decimal('0.1') returns the expected\nDecimal('0.00')).\n\nHere are some examples of using the decimal module:\n\n>>> from decimal import *\n>>> setcontext(ExtendedContext)\n>>> Decimal(0)\nDecimal('0')\n>>> Decimal('1')\nDecimal('1')\n>>> Decimal('-.0123')\nDecimal('-0.0123')\n>>> Decimal(123456)\nDecimal('123456')\n>>> Decimal('123.45e12345678')\nDecimal('1.2345E+12345680')\n>>> Decimal('1.33') + Decimal('1.27')\nDecimal('2.60')\n>>> Decimal('12.34') + Decimal('3.87') - Decimal('18.41')\nDecimal('-2.20')\n>>> dig = Decimal(1)\n>>> print(dig / Decimal(3))\n0.333333333\n>>> getcontext().prec = 18\n>>> print(dig / Decimal(3))\n0.333333333333333333\n>>> print(dig.sqrt())\n1\n>>> print(Decimal(3).sqrt())\n1.73205080756887729\n>>> print(Decimal(3) ** 123)\n4.85192780976896427E+58\n>>> inf = Decimal(1) / Decimal(0)\n>>> print(inf)\nInfinity\n>>> neginf = Decimal(-1) / Decimal(0)\n>>> print(neginf)\n-Infinity\n>>> print(neginf + inf)\nNaN\n>>> print(neginf * inf)\n-Infinity\n>>> print(dig / 0)\nInfinity\n>>> getcontext().traps[DivisionByZero] = 1\n>>> print(dig / 0)\nTraceback (most recent call last):\n ...\n ...\n ...\ndecimal.DivisionByZero: x / 0\n>>> c = Context()\n>>> c.traps[InvalidOperation] = 0\n>>> print(c.flags[InvalidOperation])\n0\n>>> c.divide(Decimal(0), Decimal(0))\nDecimal('NaN')\n>>> c.traps[InvalidOperation] = 1\n>>> print(c.flags[InvalidOperation])\n1\n>>> c.flags[InvalidOperation] = 0\n>>> print(c.flags[InvalidOperation])\n0\n>>> print(c.divide(Decimal(0), Decimal(0)))\nTraceback (most recent call last):\n ...\n ...\n ...\ndecimal.InvalidOperation: 0 / 0\n>>> print(c.flags[InvalidOperation])\n1\n>>> c.flags[InvalidOperation] = 0\n>>> c.traps[InvalidOperation] = 0\n>>> print(c.divide(Decimal(0), Decimal(0)))\nNaN\n>>> print(c.flags[InvalidOperation])\n1\n>>>\n\"\"\"\n\n__all__=[\n\n'Decimal','Context',\n\n\n'DefaultContext','BasicContext','ExtendedContext',\n\n\n'DecimalException','Clamped','InvalidOperation','DivisionByZero',\n'Inexact','Rounded','Subnormal','Overflow','Underflow',\n'FloatOperation',\n\n\n'ROUND_DOWN','ROUND_HALF_UP','ROUND_HALF_EVEN','ROUND_CEILING',\n'ROUND_FLOOR','ROUND_UP','ROUND_HALF_DOWN','ROUND_05UP',\n\n\n'setcontext','getcontext','localcontext',\n\n\n'MAX_PREC','MAX_EMAX','MIN_EMIN','MIN_ETINY',\n\n\n'HAVE_THREADS'\n]\n\n__version__='1.70'\n\n\n\n\nimport math as _math\nimport numbers as _numbers\nimport sys\n\ntry :\n from collections import namedtuple as _namedtuple\n DecimalTuple=_namedtuple('DecimalTuple','sign digits exponent')\nexcept ImportError:\n DecimalTuple=lambda *args:args\n \n \nimport _jsre as re\n\n\nROUND_DOWN='ROUND_DOWN'\nROUND_HALF_UP='ROUND_HALF_UP'\nROUND_HALF_EVEN='ROUND_HALF_EVEN'\nROUND_CEILING='ROUND_CEILING'\nROUND_FLOOR='ROUND_FLOOR'\nROUND_UP='ROUND_UP'\nROUND_HALF_DOWN='ROUND_HALF_DOWN'\nROUND_05UP='ROUND_05UP'\n\n\nHAVE_THREADS=True\nif sys.maxsize ==2 **63 -1:\n MAX_PREC=999999999999999999\n MAX_EMAX=999999999999999999\n MIN_EMIN=-999999999999999999\nelse :\n MAX_PREC=425000000\n MAX_EMAX=425000000\n MIN_EMIN=-425000000\n \nMIN_ETINY=MIN_EMIN -(MAX_PREC -1)\n\n\n\nclass DecimalException(ArithmeticError):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n def handle(self,context,*args):\n pass\n \n \nclass Clamped(DecimalException):\n ''\n\n\n\n\n\n\n\n\n \n \n \n 
pass\n \nclass InvalidOperation(DecimalException):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n def handle(self,context,*args):\n if args:\n ans=_dec_from_triple(args[0]._sign,args[0]._int,'n',True )\n return ans._fix_nan(context)\n return _NaN\n \nclass ConversionSyntax(InvalidOperation):\n ''\n\n\n\n\n \n def handle(self,context,*args):\n return _NaN\n \nclass DivisionByZero(DecimalException,ZeroDivisionError):\n ''\n\n\n\n\n\n\n\n\n\n \n \n def handle(self,context,sign,*args):\n return _SignedInfinity[sign]\n \nclass DivisionImpossible(InvalidOperation):\n ''\n\n\n\n\n \n \n def handle(self,context,*args):\n return _NaN\n \nclass DivisionUndefined(InvalidOperation,ZeroDivisionError):\n ''\n\n\n\n\n \n \n def handle(self,context,*args):\n return _NaN\n \nclass Inexact(DecimalException):\n ''\n\n\n\n\n\n\n\n\n \n \n \n pass\n \nclass InvalidContext(InvalidOperation):\n ''\n\n\n\n\n\n\n\n \n \n def handle(self,context,*args):\n return _NaN\n \nclass Rounded(DecimalException):\n ''\n\n\n\n\n\n\n\n\n \n \n pass\n \nclass Subnormal(DecimalException):\n ''\n\n\n\n\n\n\n\n \n \n pass\n \nclass Overflow(Inexact,Rounded):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n def handle(self,context,sign,*args):\n if context.rounding in (ROUND_HALF_UP,ROUND_HALF_EVEN,\n ROUND_HALF_DOWN,ROUND_UP):\n return _SignedInfinity[sign]\n if sign ==0:\n if context.rounding ==ROUND_CEILING:\n return _SignedInfinity[sign]\n return _dec_from_triple(sign,'9'*context.prec,\n context.Emax -context.prec+1)\n if sign ==1:\n if context.rounding ==ROUND_FLOOR:\n return _SignedInfinity[sign]\n return _dec_from_triple(sign,'9'*context.prec,\n context.Emax -context.prec+1)\n \n \nclass Underflow(Inexact,Rounded,Subnormal):\n ''\n\n\n\n\n\n\n\n\n\n\n\n \n \n pass\n \nclass FloatOperation(DecimalException,TypeError):\n ''\n\n\n\n\n\n\n\n\n\n\n\n \n \n pass\n \n \n_signals=[Clamped,DivisionByZero,Inexact,Overflow,Rounded,\nUnderflow,InvalidOperation,Subnormal,FloatOperation]\n\n\n_condition_map={ConversionSyntax:InvalidOperation,\nDivisionImpossible:InvalidOperation,\nDivisionUndefined:InvalidOperation,\nInvalidContext:InvalidOperation}\n\n\n_rounding_modes=(ROUND_DOWN,ROUND_HALF_UP,ROUND_HALF_EVEN,ROUND_CEILING,\nROUND_FLOOR,ROUND_UP,ROUND_HALF_DOWN,ROUND_05UP)\n\n\n\n\n\n\n\n\n\ntry :\n import threading\nexcept ImportError:\n\n class MockThreading(object):\n def local(self,sys=sys):\n return sys.modules[__name__]\n threading=MockThreading()\n del MockThreading\n \ntry :\n threading.local\n \nexcept AttributeError:\n\n\n\n if hasattr(threading.current_thread(),'__decimal_context__'):\n del threading.current_thread().__decimal_context__\n \n def setcontext(context):\n ''\n if context in (DefaultContext,BasicContext,ExtendedContext):\n context=context.copy()\n context.clear_flags()\n threading.current_thread().__decimal_context__=context\n \n def getcontext():\n ''\n\n\n\n\n \n try :\n return threading.current_thread().__decimal_context__\n except AttributeError:\n context=Context()\n threading.current_thread().__decimal_context__=context\n return context\n \nelse :\n\n local=threading.local()\n if hasattr(local,'__decimal_context__'):\n del local.__decimal_context__\n \n def getcontext(_local=local):\n ''\n\n\n\n\n \n try :\n return _local.__decimal_context__\n except AttributeError:\n context=Context()\n _local.__decimal_context__=context\n return context\n \n def setcontext(context,_local=local):\n ''\n if context in (DefaultContext,BasicContext,ExtendedContext):\n context=context.copy()\n context.clear_flags()\n 
_local.__decimal_context__=context\n \n del threading,local\n \ndef localcontext(ctx=None ):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if ctx is None :ctx=getcontext()\n return _ContextManager(ctx)\n \n \n \n \n \n \n \n \nclass Decimal(object):\n ''\n \n __slots__=('_exp','_int','_sign','_is_special')\n \n \n \n \n \n def __new__(cls,value=\"0\",context=None ):\n ''\n\n\n\n\n\n\n\n\n\n\n\n \n \n \n \n \n \n \n \n \n \n self=object.__new__(cls)\n \n \n \n if isinstance(value,str):\n value=value.strip().lower()\n \n if value.startswith(\"-\"):\n self._sign=1\n value=value[1:]\n else :\n self._sign=0\n \n if value in ('','nan'):\n self._is_special=True\n self._int=''\n \n \n \n \n self._exp='n'\n return self\n \n if value in ('inf','infinity'):\n self._int='0'\n self._exp='F'\n self._is_special=True\n return self\n \n \n _m=re.match(\"^\\d*\\.?\\d*(e\\+?\\d*)?$\",value)\n if not _m:\n self._is_special=True\n self._int=''\n self._exp='n'\n return self\n \n if'.'in value:\n intpart,fracpart=value.split('.')\n if'e'in fracpart:\n fracpart,exp=fracpart.split('e')\n exp=int(exp)\n else :\n exp=0\n \n \n self._int=intpart+fracpart\n self._exp=exp -len(fracpart)\n self._is_special=False\n return self\n else :\n \n self._is_special=False\n if'e'in value:\n self._int,_exp=value.split('e')\n self._exp=int(_exp)\n \n else :\n self._int=value\n self._exp=0\n return self\n \n \n \n if context is None :\n context=getcontext()\n return context._raise_error(ConversionSyntax,\n \"Invalid literal for Decimal: %r\"%value)\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n if isinstance(value,int):\n if value >=0:\n self._sign=0\n else :\n self._sign=1\n self._exp=0\n self._int=str(abs(value))\n self._is_special=False\n return self\n \n \n if isinstance(value,Decimal):\n self._exp=value._exp\n self._sign=value._sign\n self._int=value._int\n self._is_special=value._is_special\n return self\n \n \n if isinstance(value,_WorkRep):\n self._sign=value.sign\n self._int=str(value.int)\n self._exp=int(value.exp)\n self._is_special=False\n return self\n \n \n if isinstance(value,(list,tuple)):\n if len(value)!=3:\n raise ValueError('Invalid tuple size in creation of Decimal '\n 'from list or tuple. The list or tuple '\n 'should have exactly three elements.')\n \n if not (isinstance(value[0],int)and value[0]in (0,1)):\n raise ValueError(\"Invalid sign. 
The first value in the tuple \"\n \"should be an integer; either 0 for a \"\n \"positive number or 1 for a negative number.\")\n self._sign=value[0]\n if value[2]=='F':\n \n self._int='0'\n self._exp=value[2]\n self._is_special=True\n else :\n \n digits=[]\n for digit in value[1]:\n if isinstance(digit,int)and 0 <=digit <=9:\n \n if digits or digit !=0:\n digits.append(digit)\n else :\n raise ValueError(\"The second value in the tuple must \"\n \"be composed of integers in the range \"\n \"0 through 9.\")\n if value[2]in ('n','N'):\n \n self._int=''.join(map(str,digits))\n self._exp=value[2]\n self._is_special=True\n elif isinstance(value[2],int):\n \n self._int=''.join(map(str,digits or [0]))\n self._exp=value[2]\n self._is_special=False\n else :\n raise ValueError(\"The third value in the tuple must \"\n \"be an integer, or one of the \"\n \"strings 'F', 'n', 'N'.\")\n return self\n \n if isinstance(value,float):\n if context is None :\n context=getcontext()\n context._raise_error(FloatOperation,\n \"strict semantics for mixing floats and Decimals are \"\n \"enabled\")\n value=Decimal.from_float(value)\n self._exp=value._exp\n self._sign=value._sign\n self._int=value._int\n self._is_special=value._is_special\n return self\n \n raise TypeError(\"Cannot convert %r to Decimal\"%value)\n \n \n \n def from_float(cls,f):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if isinstance(f,int):\n return cls(f)\n if not isinstance(f,float):\n raise TypeError(\"argument must be int or float.\")\n if _math.isinf(f)or _math.isnan(f):\n return cls(repr(f))\n if _math.copysign(1.0,f)==1.0:\n sign=0\n else :\n sign=1\n n,d=abs(f).as_integer_ratio()\n k=d.bit_length()-1\n result=_dec_from_triple(sign,str(n *5 **k),-k)\n if cls is Decimal:\n return result\n else :\n return cls(result)\n from_float=classmethod(from_float)\n \n def _isnan(self):\n ''\n\n\n\n\n \n if self._is_special:\n exp=self._exp\n if exp =='n':\n return 1\n elif exp =='N':\n return 2\n return 0\n \n def _isinfinity(self):\n ''\n\n\n\n\n \n if self._exp =='F':\n if self._sign:\n return -1\n return 1\n return 0\n \n def _check_nans(self,other=None ,context=None ):\n ''\n\n\n\n\n\n\n \n \n self_is_nan=self._isnan()\n if other is None :\n other_is_nan=False\n else :\n other_is_nan=other._isnan()\n \n if self_is_nan or other_is_nan:\n if context is None :\n context=getcontext()\n \n if self_is_nan ==2:\n return context._raise_error(InvalidOperation,'sNaN',\n self)\n if other_is_nan ==2:\n return context._raise_error(InvalidOperation,'sNaN',\n other)\n if self_is_nan:\n return self._fix_nan(context)\n \n return other._fix_nan(context)\n return 0\n \n def _compare_check_nans(self,other,context):\n ''\n\n\n\n\n\n\n\n\n \n if context is None :\n context=getcontext()\n \n if self._is_special or other._is_special:\n if self.is_snan():\n return context._raise_error(InvalidOperation,\n 'comparison involving sNaN',\n self)\n elif other.is_snan():\n return context._raise_error(InvalidOperation,\n 'comparison involving sNaN',\n other)\n elif self.is_qnan():\n return context._raise_error(InvalidOperation,\n 'comparison involving NaN',\n self)\n elif other.is_qnan():\n return context._raise_error(InvalidOperation,\n 'comparison involving NaN',\n other)\n return 0\n \n def __bool__(self):\n ''\n\n\n \n return self._is_special or self._int !='0'\n \n def _cmp(self,other):\n ''\n\n\n \n \n if self._is_special or other._is_special:\n self_inf=self._isinfinity()\n other_inf=other._isinfinity()\n if self_inf ==other_inf:\n return 0\n elif self_inf 
other_adjusted:\n return (-1)**self._sign\n else :\n return -((-1)**self._sign)\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n def __eq__(self,other,context=None ):\n self,other=_convert_for_comparison(self,other,equality_op=True )\n if other is NotImplemented:\n return other\n if self._check_nans(other,context):\n return False\n return self._cmp(other)==0\n \n def __ne__(self,other,context=None ):\n self,other=_convert_for_comparison(self,other,equality_op=True )\n if other is NotImplemented:\n return other\n if self._check_nans(other,context):\n return True\n return self._cmp(other)!=0\n \n \n def __lt__(self,other,context=None ):\n self,other=_convert_for_comparison(self,other)\n if other is NotImplemented:\n return other\n ans=self._compare_check_nans(other,context)\n if ans:\n return False\n return self._cmp(other)<0\n \n def __le__(self,other,context=None ):\n self,other=_convert_for_comparison(self,other)\n if other is NotImplemented:\n return other\n ans=self._compare_check_nans(other,context)\n if ans:\n return False\n return self._cmp(other)<=0\n \n def __gt__(self,other,context=None ):\n self,other=_convert_for_comparison(self,other)\n if other is NotImplemented:\n return other\n ans=self._compare_check_nans(other,context)\n if ans:\n return False\n return self._cmp(other)>0\n \n def __ge__(self,other,context=None ):\n self,other=_convert_for_comparison(self,other)\n if other is NotImplemented:\n return other\n ans=self._compare_check_nans(other,context)\n if ans:\n return False\n return self._cmp(other)>=0\n \n def compare(self,other,context=None ):\n ''\n\n\n\n\n\n\n \n other=_convert_other(other,raiseit=True )\n \n \n if (self._is_special or other and other._is_special):\n ans=self._check_nans(other,context)\n if ans:\n return ans\n \n return Decimal(self._cmp(other))\n \n def __hash__(self):\n ''\n \n \n \n \n \n if self._is_special:\n if self.is_snan():\n raise TypeError('Cannot hash a signaling NaN value.')\n elif self.is_nan():\n return _PyHASH_NAN\n else :\n if self._sign:\n return -_PyHASH_INF\n else :\n return _PyHASH_INF\n \n if self._exp >=0:\n exp_hash=pow(10,self._exp,_PyHASH_MODULUS)\n else :\n exp_hash=pow(_PyHASH_10INV,-self._exp,_PyHASH_MODULUS)\n hash_=int(self._int)*exp_hash %_PyHASH_MODULUS\n ans=hash_ if self >=0 else -hash_\n return -2 if ans ==-1 else ans\n \n def as_tuple(self):\n ''\n\n\n \n return DecimalTuple(self._sign,tuple(map(int,self._int)),self._exp)\n \n def __repr__(self):\n ''\n \n return\"Decimal('%s')\"%str(self)\n \n def __str__(self,eng=False ,context=None ):\n ''\n\n\n \n \n sign=['','-'][self._sign]\n if self._is_special:\n if self._exp =='F':\n return sign+'Infinity'\n elif self._exp =='n':\n return sign+'NaN'+self._int\n else :\n return sign+'sNaN'+self._int\n \n \n leftdigits=self._exp+len(self._int)\n \n \n \n \n if self._exp <=0 and leftdigits >-6:\n \n dotplace=leftdigits\n elif not eng:\n \n dotplace=1\n elif self._int =='0':\n \n dotplace=(leftdigits+1)%3 -1\n else :\n \n dotplace=(leftdigits -1)%3+1\n \n if dotplace <=0:\n intpart='0'\n fracpart='.'+'0'*(-dotplace)+self._int\n elif dotplace >=len(self._int):\n intpart=self._int+'0'*(dotplace -len(self._int))\n fracpart=''\n else :\n intpart=self._int[:dotplace]\n fracpart='.'+self._int[dotplace:]\n if leftdigits ==dotplace:\n exp=''\n else :\n if context is None :\n context=getcontext()\n exp=['e','E'][context.capitals]+\"%+d\"%(leftdigits -dotplace)\n \n return sign+intpart+fracpart+exp\n \n def to_eng_string(self,context=None ):\n ''\n\n\n\n\n\n \n return 
self.__str__(eng=True ,context=context)\n \n def __neg__(self,context=None ):\n ''\n\n\n \n if self._is_special:\n ans=self._check_nans(context=context)\n if ans:\n return ans\n \n if context is None :\n context=getcontext()\n \n if not self and context.rounding !=ROUND_FLOOR:\n \n \n ans=self.copy_abs()\n else :\n ans=self.copy_negate()\n \n return ans._fix(context)\n \n def __pos__(self,context=None ):\n ''\n\n\n \n if self._is_special:\n ans=self._check_nans(context=context)\n if ans:\n return ans\n \n if context is None :\n context=getcontext()\n \n if not self and context.rounding !=ROUND_FLOOR:\n \n ans=self.copy_abs()\n else :\n ans=Decimal(self)\n \n return ans._fix(context)\n \n def __abs__(self,round=True ,context=None ):\n ''\n\n\n\n\n \n if not round:\n return self.copy_abs()\n \n if self._is_special:\n ans=self._check_nans(context=context)\n if ans:\n return ans\n \n if self._sign:\n ans=self.__neg__(context=context)\n else :\n ans=self.__pos__(context=context)\n \n return ans\n \n def __add__(self,other,context=None ):\n ''\n\n\n \n other=_convert_other(other)\n if other is NotImplemented:\n return other\n \n if context is None :\n context=getcontext()\n \n if self._is_special or other._is_special:\n ans=self._check_nans(other,context)\n if ans:\n return ans\n \n if self._isinfinity():\n \n if self._sign !=other._sign and other._isinfinity():\n return context._raise_error(InvalidOperation,'-INF + INF')\n return Decimal(self)\n if other._isinfinity():\n return Decimal(other)\n \n exp=min(self._exp,other._exp)\n negativezero=0\n if context.rounding ==ROUND_FLOOR and self._sign !=other._sign:\n \n negativezero=1\n \n if not self and not other:\n sign=min(self._sign,other._sign)\n if negativezero:\n sign=1\n ans=_dec_from_triple(sign,'0',exp)\n ans=ans._fix(context)\n return ans\n if not self:\n exp=max(exp,other._exp -context.prec -1)\n ans=other._rescale(exp,context.rounding)\n ans=ans._fix(context)\n return ans\n if not other:\n exp=max(exp,self._exp -context.prec -1)\n ans=self._rescale(exp,context.rounding)\n ans=ans._fix(context)\n return ans\n \n op1=_WorkRep(self)\n op2=_WorkRep(other)\n op1,op2=_normalize(op1,op2,context.prec)\n \n result=_WorkRep()\n if op1.sign !=op2.sign:\n \n if op1.int ==op2.int:\n ans=_dec_from_triple(negativezero,'0',exp)\n ans=ans._fix(context)\n return ans\n if op1.int =0:\n coeff,remainder=divmod(op1.int *10 **shift,op2.int)\n else :\n coeff,remainder=divmod(op1.int,op2.int *10 **-shift)\n if remainder:\n \n if coeff %5 ==0:\n coeff +=1\n else :\n \n ideal_exp=self._exp -other._exp\n while exp =op2.exp:\n op1.int *=10 **(op1.exp -op2.exp)\n else :\n op2.int *=10 **(op2.exp -op1.exp)\n q,r=divmod(op1.int,op2.int)\n if q <10 **context.prec:\n return (_dec_from_triple(sign,str(q),0),\n _dec_from_triple(self._sign,str(r),ideal_exp))\n \n \n ans=context._raise_error(DivisionImpossible,\n 'quotient too large in //, % or divmod')\n return ans,ans\n \n def __rtruediv__(self,other,context=None ):\n ''\n other=_convert_other(other)\n if other is NotImplemented:\n return other\n return other.__truediv__(self,context=context)\n \n def __divmod__(self,other,context=None ):\n ''\n\n \n other=_convert_other(other)\n if other is NotImplemented:\n return other\n \n if context is None :\n context=getcontext()\n \n ans=self._check_nans(other,context)\n if ans:\n return (ans,ans)\n \n sign=self._sign ^other._sign\n if self._isinfinity():\n if other._isinfinity():\n ans=context._raise_error(InvalidOperation,'divmod(INF, INF)')\n return ans,ans\n else :\n return 
(_SignedInfinity[sign],\n context._raise_error(InvalidOperation,'INF % x'))\n \n if not other:\n if not self:\n ans=context._raise_error(DivisionUndefined,'divmod(0, 0)')\n return ans,ans\n else :\n return (context._raise_error(DivisionByZero,'x // 0',sign),\n context._raise_error(InvalidOperation,'x % 0'))\n \n quotient,remainder=self._divide(other,context)\n remainder=remainder._fix(context)\n return quotient,remainder\n \n def __rdivmod__(self,other,context=None ):\n ''\n other=_convert_other(other)\n if other is NotImplemented:\n return other\n return other.__divmod__(self,context=context)\n \n def __mod__(self,other,context=None ):\n ''\n\n \n other=_convert_other(other)\n if other is NotImplemented:\n return other\n \n if context is None :\n context=getcontext()\n \n ans=self._check_nans(other,context)\n if ans:\n return ans\n \n if self._isinfinity():\n return context._raise_error(InvalidOperation,'INF % x')\n elif not other:\n if self:\n return context._raise_error(InvalidOperation,'x % 0')\n else :\n return context._raise_error(DivisionUndefined,'0 % 0')\n \n remainder=self._divide(other,context)[1]\n remainder=remainder._fix(context)\n return remainder\n \n def __rmod__(self,other,context=None ):\n ''\n other=_convert_other(other)\n if other is NotImplemented:\n return other\n return other.__mod__(self,context=context)\n \n def remainder_near(self,other,context=None ):\n ''\n\n \n if context is None :\n context=getcontext()\n \n other=_convert_other(other,raiseit=True )\n \n ans=self._check_nans(other,context)\n if ans:\n return ans\n \n \n if self._isinfinity():\n return context._raise_error(InvalidOperation,\n 'remainder_near(infinity, x)')\n \n \n if not other:\n if self:\n return context._raise_error(InvalidOperation,\n 'remainder_near(x, 0)')\n else :\n return context._raise_error(DivisionUndefined,\n 'remainder_near(0, 0)')\n \n \n if other._isinfinity():\n ans=Decimal(self)\n return ans._fix(context)\n \n \n ideal_exponent=min(self._exp,other._exp)\n if not self:\n ans=_dec_from_triple(self._sign,'0',ideal_exponent)\n return ans._fix(context)\n \n \n expdiff=self.adjusted()-other.adjusted()\n if expdiff >=context.prec+1:\n \n return context._raise_error(DivisionImpossible)\n if expdiff <=-2:\n \n ans=self._rescale(ideal_exponent,context.rounding)\n return ans._fix(context)\n \n \n op1=_WorkRep(self)\n op2=_WorkRep(other)\n if op1.exp >=op2.exp:\n op1.int *=10 **(op1.exp -op2.exp)\n else :\n op2.int *=10 **(op2.exp -op1.exp)\n q,r=divmod(op1.int,op2.int)\n \n \n \n if 2 *r+(q&1)>op2.int:\n r -=op2.int\n q +=1\n \n if q >=10 **context.prec:\n return context._raise_error(DivisionImpossible)\n \n \n sign=self._sign\n if r <0:\n sign=1 -sign\n r=-r\n \n ans=_dec_from_triple(sign,str(r),ideal_exponent)\n return ans._fix(context)\n \n def __floordiv__(self,other,context=None ):\n ''\n other=_convert_other(other)\n if other is NotImplemented:\n return other\n \n if context is None :\n context=getcontext()\n \n ans=self._check_nans(other,context)\n if ans:\n return ans\n \n if self._isinfinity():\n if other._isinfinity():\n return context._raise_error(InvalidOperation,'INF // INF')\n else :\n return _SignedInfinity[self._sign ^other._sign]\n \n if not other:\n if self:\n return context._raise_error(DivisionByZero,'x // 0',\n self._sign ^other._sign)\n else :\n return context._raise_error(DivisionUndefined,'0 // 0')\n \n return self._divide(other,context)[0]\n \n def __rfloordiv__(self,other,context=None ):\n ''\n other=_convert_other(other)\n if other is NotImplemented:\n return 
other\n return other.__floordiv__(self,context=context)\n \n def __float__(self):\n ''\n if self._isnan():\n if self.is_snan():\n raise ValueError(\"Cannot convert signaling NaN to float\")\n s=\"-nan\"if self._sign else\"nan\"\n else :\n s=str(self)\n return float(s)\n \n def __int__(self):\n ''\n if self._is_special:\n if self._isnan():\n raise ValueError(\"Cannot convert NaN to integer\")\n elif self._isinfinity():\n raise OverflowError(\"Cannot convert infinity to integer\")\n s=(-1)**self._sign\n if self._exp >=0:\n return s *int(self._int)*10 **self._exp\n else :\n return s *int(self._int[:self._exp]or'0')\n \n __trunc__=__int__\n \n def real(self):\n return self\n real=property(real)\n \n def imag(self):\n return Decimal(0)\n imag=property(imag)\n \n def conjugate(self):\n return self\n \n def __complex__(self):\n return complex(float(self))\n \n def _fix_nan(self,context):\n ''\n payload=self._int\n \n \n \n max_payload_len=context.prec -context.clamp\n if len(payload)>max_payload_len:\n payload=payload[len(payload)-max_payload_len:].lstrip('0')\n return _dec_from_triple(self._sign,payload,self._exp,True )\n return Decimal(self)\n \n def _fix(self,context):\n ''\n\n\n\n\n\n\n \n \n if self._is_special:\n if self._isnan():\n \n return self._fix_nan(context)\n else :\n \n return Decimal(self)\n \n \n \n Etiny=context.Etiny()\n Etop=context.Etop()\n if not self:\n exp_max=[context.Emax,Etop][context.clamp]\n new_exp=min(max(self._exp,Etiny),exp_max)\n if new_exp !=self._exp:\n context._raise_error(Clamped)\n return _dec_from_triple(self._sign,'0',new_exp)\n else :\n return Decimal(self)\n \n \n \n exp_min=len(self._int)+self._exp -context.prec\n if exp_min >Etop:\n \n ans=context._raise_error(Overflow,'above Emax',self._sign)\n context._raise_error(Inexact)\n context._raise_error(Rounded)\n return ans\n \n self_is_subnormal=exp_min 0:\n coeff=str(int(coeff)+1)\n if len(coeff)>context.prec:\n coeff=coeff[:-1]\n exp_min +=1\n \n \n if exp_min >Etop:\n ans=context._raise_error(Overflow,'above Emax',self._sign)\n else :\n ans=_dec_from_triple(self._sign,coeff,exp_min)\n \n \n \n if changed and self_is_subnormal:\n context._raise_error(Underflow)\n if self_is_subnormal:\n context._raise_error(Subnormal)\n if changed:\n context._raise_error(Inexact)\n context._raise_error(Rounded)\n if not ans:\n \n context._raise_error(Clamped)\n return ans\n \n if self_is_subnormal:\n context._raise_error(Subnormal)\n \n \n if context.clamp ==1 and self._exp >Etop:\n context._raise_error(Clamped)\n self_padded=self._int+'0'*(self._exp -Etop)\n return _dec_from_triple(self._sign,self_padded,Etop)\n \n \n return Decimal(self)\n \n \n \n \n \n \n \n \n \n \n \n def _round_down(self,prec):\n ''\n if _all_zeros(self._int,prec):\n return 0\n else :\n return -1\n \n def _round_up(self,prec):\n ''\n return -self._round_down(prec)\n \n def _round_half_up(self,prec):\n ''\n if self._int[prec]in'56789':\n return 1\n elif _all_zeros(self._int,prec):\n return 0\n else :\n return -1\n \n def _round_half_down(self,prec):\n ''\n if _exact_half(self._int,prec):\n return -1\n else :\n return self._round_half_up(prec)\n \n def _round_half_even(self,prec):\n ''\n if _exact_half(self._int,prec)and (prec ==0 or self._int[prec -1]in'02468'):\n return -1\n else :\n return self._round_half_up(prec)\n \n def _round_ceiling(self,prec):\n ''\n if self._sign:\n return self._round_down(prec)\n else :\n return -self._round_down(prec)\n \n def _round_floor(self,prec):\n ''\n if not self._sign:\n return self._round_down(prec)\n else :\n 
return -self._round_down(prec)\n \n def _round_05up(self,prec):\n ''\n if prec and self._int[prec -1]not in'05':\n return self._round_down(prec)\n else :\n return -self._round_down(prec)\n \n _pick_rounding_function=dict(\n ROUND_DOWN=_round_down,\n ROUND_UP=_round_up,\n ROUND_HALF_UP=_round_half_up,\n ROUND_HALF_DOWN=_round_half_down,\n ROUND_HALF_EVEN=_round_half_even,\n ROUND_CEILING=_round_ceiling,\n ROUND_FLOOR=_round_floor,\n ROUND_05UP=_round_05up,\n )\n \n def __round__(self,n=None ):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if n is not None :\n \n if not isinstance(n,int):\n raise TypeError('Second argument to round should be integral')\n exp=_dec_from_triple(0,'1',-n)\n return self.quantize(exp)\n \n \n if self._is_special:\n if self.is_nan():\n raise ValueError(\"cannot round a NaN\")\n else :\n raise OverflowError(\"cannot round an infinity\")\n return int(self._rescale(0,ROUND_HALF_EVEN))\n \n def __floor__(self):\n ''\n\n\n\n\n\n \n if self._is_special:\n if self.is_nan():\n raise ValueError(\"cannot round a NaN\")\n else :\n raise OverflowError(\"cannot round an infinity\")\n return int(self._rescale(0,ROUND_FLOOR))\n \n def __ceil__(self):\n ''\n\n\n\n\n\n \n if self._is_special:\n if self.is_nan():\n raise ValueError(\"cannot round a NaN\")\n else :\n raise OverflowError(\"cannot round an infinity\")\n return int(self._rescale(0,ROUND_CEILING))\n \n def fma(self,other,third,context=None ):\n ''\n\n\n\n\n\n\n\n \n \n other=_convert_other(other,raiseit=True )\n third=_convert_other(third,raiseit=True )\n \n \n \n if self._is_special or other._is_special:\n if context is None :\n context=getcontext()\n if self._exp =='N':\n return context._raise_error(InvalidOperation,'sNaN',self)\n if other._exp =='N':\n return context._raise_error(InvalidOperation,'sNaN',other)\n if self._exp =='n':\n product=self\n elif other._exp =='n':\n product=other\n elif self._exp =='F':\n if not other:\n return context._raise_error(InvalidOperation,\n 'INF * 0 in fma')\n product=_SignedInfinity[self._sign ^other._sign]\n elif other._exp =='F':\n if not self:\n return context._raise_error(InvalidOperation,\n '0 * INF in fma')\n product=_SignedInfinity[self._sign ^other._sign]\n else :\n product=_dec_from_triple(self._sign ^other._sign,\n str(int(self._int)*int(other._int)),\n self._exp+other._exp)\n \n return product.__add__(third,context)\n \n def _power_modulo(self,other,modulo,context=None ):\n ''\n \n other=_convert_other(other)\n if other is NotImplemented:\n return other\n modulo=_convert_other(modulo)\n if modulo is NotImplemented:\n return modulo\n \n if context is None :\n context=getcontext()\n \n \n \n self_is_nan=self._isnan()\n other_is_nan=other._isnan()\n modulo_is_nan=modulo._isnan()\n if self_is_nan or other_is_nan or modulo_is_nan:\n if self_is_nan ==2:\n return context._raise_error(InvalidOperation,'sNaN',\n self)\n if other_is_nan ==2:\n return context._raise_error(InvalidOperation,'sNaN',\n other)\n if modulo_is_nan ==2:\n return context._raise_error(InvalidOperation,'sNaN',\n modulo)\n if self_is_nan:\n return self._fix_nan(context)\n if other_is_nan:\n return other._fix_nan(context)\n return modulo._fix_nan(context)\n \n \n if not (self._isinteger()and\n other._isinteger()and\n modulo._isinteger()):\n return context._raise_error(InvalidOperation,\n 'pow() 3rd argument not allowed '\n 'unless all arguments are integers')\n if other <0:\n return context._raise_error(InvalidOperation,\n 'pow() 2nd argument cannot be '\n 
'negative when 3rd argument specified')\n if not modulo:\n return context._raise_error(InvalidOperation,\n 'pow() 3rd argument cannot be 0')\n \n \n \n if modulo.adjusted()>=context.prec:\n return context._raise_error(InvalidOperation,\n 'insufficient precision: pow() 3rd '\n 'argument must not have more than '\n 'precision digits')\n \n \n \n if not other and not self:\n return context._raise_error(InvalidOperation,\n 'at least one of pow() 1st argument '\n 'and 2nd argument must be nonzero ;'\n '0**0 is not defined')\n \n \n if other._iseven():\n sign=0\n else :\n sign=self._sign\n \n \n \n modulo=abs(int(modulo))\n base=_WorkRep(self.to_integral_value())\n exponent=_WorkRep(other.to_integral_value())\n \n \n base=(base.int %modulo *pow(10,base.exp,modulo))%modulo\n for i in range(exponent.exp):\n base=pow(base,10,modulo)\n base=pow(base,exponent.int,modulo)\n \n return _dec_from_triple(sign,str(base),0)\n \n def _power_exact(self,other,p):\n ''\n\n\n\n\n\n\n\n\n\n\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n x=_WorkRep(self)\n xc,xe=x.int,x.exp\n while xc %10 ==0:\n xc //=10\n xe +=1\n \n y=_WorkRep(other)\n yc,ye=y.int,y.exp\n while yc %10 ==0:\n yc //=10\n ye +=1\n \n \n \n if xc ==1:\n xe *=yc\n \n while xe %10 ==0:\n xe //=10\n ye +=1\n if ye <0:\n return None\n exponent=xe *10 **ye\n if y.sign ==1:\n exponent=-exponent\n \n if other._isinteger()and other._sign ==0:\n ideal_exponent=self._exp *int(other)\n zeros=min(exponent -ideal_exponent,p -1)\n else :\n zeros=0\n return _dec_from_triple(0,'1'+'0'*zeros,exponent -zeros)\n \n \n \n if y.sign ==1:\n last_digit=xc %10\n if last_digit in (2,4,6,8):\n \n if xc&-xc !=xc:\n return None\n \n e=_nbits(xc)-1\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n emax=p *93 //65\n if ye >=len(str(emax)):\n return None\n \n \n e=_decimal_lshift_exact(e *yc,ye)\n xe=_decimal_lshift_exact(xe *yc,ye)\n if e is None or xe is None :\n return None\n \n if e >emax:\n return None\n xc=5 **e\n \n elif last_digit ==5:\n \n \n e=_nbits(xc)*28 //65\n xc,remainder=divmod(5 **e,xc)\n if remainder:\n return None\n while xc %5 ==0:\n xc //=5\n e -=1\n \n \n \n \n emax=p *10 //3\n if ye >=len(str(emax)):\n return None\n \n e=_decimal_lshift_exact(e *yc,ye)\n xe=_decimal_lshift_exact(xe *yc,ye)\n if e is None or xe is None :\n return None\n \n if e >emax:\n return None\n xc=2 **e\n else :\n return None\n \n if xc >=10 **p:\n return None\n xe=-e -xe\n return _dec_from_triple(0,str(xc),xe)\n \n \n if ye >=0:\n m,n=yc *10 **ye,1\n else :\n if xe !=0 and len(str(abs(yc *xe)))<=-ye:\n return None\n xc_bits=_nbits(xc)\n if xc !=1 and len(str(abs(yc)*xc_bits))<=-ye:\n return None\n m,n=yc,10 **(-ye)\n while m %2 ==n %2 ==0:\n m //=2\n n //=2\n while m %5 ==n %5 ==0:\n m //=5\n n //=5\n \n \n if n >1:\n \n if xc !=1 and xc_bits <=n:\n return None\n \n xe,rem=divmod(xe,n)\n if rem !=0:\n return None\n \n \n a=1 <<-(-_nbits(xc)//n)\n while True :\n q,r=divmod(xc,a **(n -1))\n if a <=q:\n break\n else :\n a=(a *(n -1)+q)//n\n if not (a ==q and r ==0):\n return None\n xc=a\n \n \n \n \n \n \n if xc >1 and m >p *100 //_log10_lb(xc):\n return None\n xc=xc **m\n xe *=m\n if xc >10 **p:\n return None\n \n \n \n \n str_xc=str(xc)\n if other._isinteger()and other._sign ==0:\n ideal_exponent=self._exp *int(other)\n zeros=min(xe -ideal_exponent,p -len(str_xc))\n else :\n zeros=0\n return _dec_from_triple(0,str_xc+'0'*zeros,xe -zeros)\n \n def 
__pow__(self,other,modulo=None ,context=None ):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n if modulo is not None :\n return self._power_modulo(other,modulo,context)\n \n other=_convert_other(other)\n if other is NotImplemented:\n return other\n \n if context is None :\n context=getcontext()\n \n \n ans=self._check_nans(other,context)\n if ans:\n return ans\n \n \n if not other:\n if not self:\n return context._raise_error(InvalidOperation,'0 ** 0')\n else :\n return _One\n \n \n result_sign=0\n if self._sign ==1:\n if other._isinteger():\n if not other._iseven():\n result_sign=1\n else :\n \n \n if self:\n return context._raise_error(InvalidOperation,\n 'x ** y with x negative and y not an integer')\n \n self=self.copy_negate()\n \n \n if not self:\n if other._sign ==0:\n return _dec_from_triple(result_sign,'0',0)\n else :\n return _SignedInfinity[result_sign]\n \n \n if self._isinfinity():\n if other._sign ==0:\n return _SignedInfinity[result_sign]\n else :\n return _dec_from_triple(result_sign,'0',0)\n \n \n \n \n if self ==_One:\n if other._isinteger():\n \n \n \n \n if other._sign ==1:\n multiplier=0\n elif other >context.prec:\n multiplier=context.prec\n else :\n multiplier=int(other)\n \n exp=self._exp *multiplier\n if exp <1 -context.prec:\n exp=1 -context.prec\n context._raise_error(Rounded)\n else :\n context._raise_error(Inexact)\n context._raise_error(Rounded)\n exp=1 -context.prec\n \n return _dec_from_triple(result_sign,'1'+'0'*-exp,exp)\n \n \n self_adj=self.adjusted()\n \n \n \n if other._isinfinity():\n if (other._sign ==0)==(self_adj <0):\n return _dec_from_triple(result_sign,'0',0)\n else :\n return _SignedInfinity[result_sign]\n \n \n \n ans=None\n exact=False\n \n \n \n \n \n \n bound=self._log10_exp_bound()+other.adjusted()\n if (self_adj >=0)==(other._sign ==0):\n \n \n if bound >=len(str(context.Emax)):\n ans=_dec_from_triple(result_sign,'1',context.Emax+1)\n else :\n \n \n Etiny=context.Etiny()\n if bound >=len(str(-Etiny)):\n ans=_dec_from_triple(result_sign,'1',Etiny -1)\n \n \n if ans is None :\n ans=self._power_exact(other,context.prec+1)\n if ans is not None :\n if result_sign ==1:\n ans=_dec_from_triple(1,ans._int,ans._exp)\n exact=True\n \n \n if ans is None :\n p=context.prec\n x=_WorkRep(self)\n xc,xe=x.int,x.exp\n y=_WorkRep(other)\n yc,ye=y.int,y.exp\n if y.sign ==1:\n yc=-yc\n \n \n \n extra=3\n while True :\n coeff,exp=_dpower(xc,xe,yc,ye,p+extra)\n if coeff %(5 *10 **(len(str(coeff))-p -1)):\n break\n extra +=3\n \n ans=_dec_from_triple(result_sign,str(coeff),exp)\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n if exact and not other._isinteger():\n \n \n if len(ans._int)<=context.prec:\n expdiff=context.prec+1 -len(ans._int)\n ans=_dec_from_triple(ans._sign,ans._int+'0'*expdiff,\n ans._exp -expdiff)\n \n \n newcontext=context.copy()\n newcontext.clear_flags()\n for exception in _signals:\n newcontext.traps[exception]=0\n \n \n ans=ans._fix(newcontext)\n \n \n newcontext._raise_error(Inexact)\n if newcontext.flags[Subnormal]:\n newcontext._raise_error(Underflow)\n \n \n \n \n \n \n if newcontext.flags[Overflow]:\n context._raise_error(Overflow,'above Emax',ans._sign)\n for exception in Underflow,Subnormal,Inexact,Rounded,Clamped:\n if newcontext.flags[exception]:\n context._raise_error(exception)\n \n else :\n ans=ans._fix(context)\n \n return ans\n \n def __rpow__(self,other,context=None ):\n ''\n other=_convert_other(other)\n if other is NotImplemented:\n return other\n return other.__pow__(self,context=context)\n \n def 
normalize(self,context=None ):\n ''\n \n if context is None :\n context=getcontext()\n \n if self._is_special:\n ans=self._check_nans(context=context)\n if ans:\n return ans\n \n dup=self._fix(context)\n if dup._isinfinity():\n return dup\n \n if not dup:\n return _dec_from_triple(dup._sign,'0',0)\n exp_max=[context.Emax,context.Etop()][context.clamp]\n end=len(dup._int)\n exp=dup._exp\n while dup._int[end -1]=='0'and exp self._exp:\n context._raise_error(Rounded)\n if ans !=self:\n context._raise_error(Inexact)\n return ans\n \n \n if not (context.Etiny()<=exp._exp <=context.Emax):\n return context._raise_error(InvalidOperation,\n 'target exponent out of bounds in quantize')\n \n if not self:\n ans=_dec_from_triple(self._sign,'0',exp._exp)\n return ans._fix(context)\n \n self_adjusted=self.adjusted()\n if self_adjusted >context.Emax:\n return context._raise_error(InvalidOperation,\n 'exponent of quantize result too large for current context')\n if self_adjusted -exp._exp+1 >context.prec:\n return context._raise_error(InvalidOperation,\n 'quantize result has too many digits for current context')\n \n ans=self._rescale(exp._exp,rounding)\n if ans.adjusted()>context.Emax:\n return context._raise_error(InvalidOperation,\n 'exponent of quantize result too large for current context')\n if len(ans._int)>context.prec:\n return context._raise_error(InvalidOperation,\n 'quantize result has too many digits for current context')\n \n \n if ans and ans.adjusted()self._exp:\n if ans !=self:\n context._raise_error(Inexact)\n context._raise_error(Rounded)\n \n \n \n ans=ans._fix(context)\n return ans\n \n def same_quantum(self,other,context=None ):\n ''\n\n\n\n\n\n\n \n other=_convert_other(other,raiseit=True )\n if self._is_special or other._is_special:\n return (self.is_nan()and other.is_nan()or\n self.is_infinite()and other.is_infinite())\n return self._exp ==other._exp\n \n def _rescale(self,exp,rounding):\n ''\n\n\n\n\n\n\n\n\n \n if self._is_special:\n return Decimal(self)\n if not self:\n return _dec_from_triple(self._sign,'0',exp)\n \n if self._exp >=exp:\n \n return _dec_from_triple(self._sign,\n self._int+'0'*(self._exp -exp),exp)\n \n \n \n digits=len(self._int)+self._exp -exp\n if digits <0:\n self=_dec_from_triple(self._sign,'1',exp -1)\n digits=0\n this_function=self._pick_rounding_function[rounding]\n changed=this_function(self,digits)\n coeff=self._int[:digits]or'0'\n if changed ==1:\n coeff=str(int(coeff)+1)\n return _dec_from_triple(self._sign,coeff,exp)\n \n def _round(self,places,rounding):\n ''\n\n\n\n\n\n\n\n \n if places <=0:\n raise ValueError(\"argument should be at least 1 in _round\")\n if self._is_special or not self:\n return Decimal(self)\n ans=self._rescale(self.adjusted()+1 -places,rounding)\n \n \n \n \n if ans.adjusted()!=self.adjusted():\n ans=ans._rescale(ans.adjusted()+1 -places,rounding)\n return ans\n \n def to_integral_exact(self,rounding=None ,context=None ):\n ''\n\n\n\n\n\n\n\n \n if self._is_special:\n ans=self._check_nans(context=context)\n if ans:\n return ans\n return Decimal(self)\n if self._exp >=0:\n return Decimal(self)\n if not self:\n return _dec_from_triple(self._sign,'0',0)\n if context is None :\n context=getcontext()\n if rounding is None :\n rounding=context.rounding\n ans=self._rescale(0,rounding)\n if ans !=self:\n context._raise_error(Inexact)\n context._raise_error(Rounded)\n return ans\n \n def to_integral_value(self,rounding=None ,context=None ):\n ''\n if context is None :\n context=getcontext()\n if rounding is None :\n 
rounding=context.rounding\n if self._is_special:\n ans=self._check_nans(context=context)\n if ans:\n return ans\n return Decimal(self)\n if self._exp >=0:\n return Decimal(self)\n else :\n return self._rescale(0,rounding)\n \n \n to_integral=to_integral_value\n \n def sqrt(self,context=None ):\n ''\n if context is None :\n context=getcontext()\n \n if self._is_special:\n ans=self._check_nans(context=context)\n if ans:\n return ans\n \n if self._isinfinity()and self._sign ==0:\n return Decimal(self)\n \n if not self:\n \n ans=_dec_from_triple(self._sign,'0',self._exp //2)\n return ans._fix(context)\n \n if self._sign ==1:\n return context._raise_error(InvalidOperation,'sqrt(-x), x > 0')\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n prec=context.prec+1\n \n \n \n \n \n op=_WorkRep(self)\n e=op.exp >>1\n if op.exp&1:\n c=op.int *10\n l=(len(self._int)>>1)+1\n else :\n c=op.int\n l=len(self._int)+1 >>1\n \n \n shift=prec -l\n if shift >=0:\n c *=100 **shift\n exact=True\n else :\n c,remainder=divmod(c,100 **-shift)\n exact=not remainder\n e -=shift\n \n \n n=10 **prec\n while True :\n q=c //n\n if n <=q:\n break\n else :\n n=n+q >>1\n exact=exact and n *n ==c\n \n if exact:\n \n if shift >=0:\n \n n //=10 **shift\n else :\n n *=10 **-shift\n e +=shift\n else :\n \n if n %5 ==0:\n n +=1\n \n ans=_dec_from_triple(0,str(n),e)\n \n \n context=context._shallow_copy()\n rounding=context._set_rounding(ROUND_HALF_EVEN)\n ans=ans._fix(context)\n context.rounding=rounding\n \n return ans\n \n def max(self,other,context=None ):\n ''\n\n\n\n \n other=_convert_other(other,raiseit=True )\n \n if context is None :\n context=getcontext()\n \n if self._is_special or other._is_special:\n \n \n sn=self._isnan()\n on=other._isnan()\n if sn or on:\n if on ==1 and sn ==0:\n return self._fix(context)\n if sn ==1 and on ==0:\n return other._fix(context)\n return self._check_nans(other,context)\n \n c=self._cmp(other)\n if c ==0:\n \n \n \n \n \n \n \n \n c=self.compare_total(other)\n \n if c ==-1:\n ans=other\n else :\n ans=self\n \n return ans._fix(context)\n \n def min(self,other,context=None ):\n ''\n\n\n\n \n other=_convert_other(other,raiseit=True )\n \n if context is None :\n context=getcontext()\n \n if self._is_special or other._is_special:\n \n \n sn=self._isnan()\n on=other._isnan()\n if sn or on:\n if on ==1 and sn ==0:\n return self._fix(context)\n if sn ==1 and on ==0:\n return other._fix(context)\n return self._check_nans(other,context)\n \n c=self._cmp(other)\n if c ==0:\n c=self.compare_total(other)\n \n if c ==-1:\n ans=self\n else :\n ans=other\n \n return ans._fix(context)\n \n def _isinteger(self):\n ''\n if self._is_special:\n return False\n if self._exp >=0:\n return True\n rest=self._int[self._exp:]\n return rest =='0'*len(rest)\n \n def _iseven(self):\n ''\n if not self or self._exp >0:\n return True\n return self._int[-1+self._exp]in'02468'\n \n def adjusted(self):\n ''\n try :\n return self._exp+len(self._int)-1\n \n except TypeError:\n return 0\n \n def canonical(self):\n ''\n\n\n\n \n return self\n \n def compare_signal(self,other,context=None ):\n ''\n\n\n\n \n other=_convert_other(other,raiseit=True )\n ans=self._compare_check_nans(other,context)\n if ans:\n return ans\n return self.compare(other,context=context)\n \n def compare_total(self,other,context=None ):\n ''\n\n\n\n\n \n other=_convert_other(other,raiseit=True )\n \n \n if self._sign and not other._sign:\n return _NegativeOne\n if not self._sign and other._sign:\n return _One\n sign=self._sign\n \n \n 
self_nan=self._isnan()\n other_nan=other._isnan()\n if self_nan or other_nan:\n if self_nan ==other_nan:\n \n self_key=len(self._int),self._int\n other_key=len(other._int),other._int\n if self_key other_key:\n if sign:\n return _NegativeOne\n else :\n return _One\n return _Zero\n \n if sign:\n if self_nan ==1:\n return _NegativeOne\n if other_nan ==1:\n return _One\n if self_nan ==2:\n return _NegativeOne\n if other_nan ==2:\n return _One\n else :\n if self_nan ==1:\n return _One\n if other_nan ==1:\n return _NegativeOne\n if self_nan ==2:\n return _One\n if other_nan ==2:\n return _NegativeOne\n \n if self other:\n return _One\n \n if self._exp other._exp:\n if sign:\n return _NegativeOne\n else :\n return _One\n return _Zero\n \n \n def compare_total_mag(self,other,context=None ):\n ''\n\n\n \n other=_convert_other(other,raiseit=True )\n \n s=self.copy_abs()\n o=other.copy_abs()\n return s.compare_total(o)\n \n def copy_abs(self):\n ''\n return _dec_from_triple(0,self._int,self._exp,self._is_special)\n \n def copy_negate(self):\n ''\n if self._sign:\n return _dec_from_triple(0,self._int,self._exp,self._is_special)\n else :\n return _dec_from_triple(1,self._int,self._exp,self._is_special)\n \n def copy_sign(self,other,context=None ):\n ''\n other=_convert_other(other,raiseit=True )\n return _dec_from_triple(other._sign,self._int,\n self._exp,self._is_special)\n \n def exp(self,context=None ):\n ''\n \n if context is None :\n context=getcontext()\n \n \n ans=self._check_nans(context=context)\n if ans:\n return ans\n \n \n if self._isinfinity()==-1:\n return _Zero\n \n \n if not self:\n return _One\n \n \n if self._isinfinity()==1:\n return Decimal(self)\n \n \n \n \n \n p=context.prec\n adj=self.adjusted()\n \n \n \n \n \n \n if self._sign ==0 and adj >len(str((context.Emax+1)*3)):\n \n ans=_dec_from_triple(0,'1',context.Emax+1)\n elif self._sign ==1 and adj >len(str((-context.Etiny()+1)*3)):\n \n ans=_dec_from_triple(0,'1',context.Etiny()-1)\n elif self._sign ==0 and adj <-p:\n \n ans=_dec_from_triple(0,'1'+'0'*(p -1)+'1',-p)\n elif self._sign ==1 and adj <-p -1:\n \n ans=_dec_from_triple(0,'9'*(p+1),-p -1)\n \n else :\n op=_WorkRep(self)\n c,e=op.int,op.exp\n if op.sign ==1:\n c=-c\n \n \n \n \n extra=3\n while True :\n coeff,exp=_dexp(c,e,p+extra)\n if coeff %(5 *10 **(len(str(coeff))-p -1)):\n break\n extra +=3\n \n ans=_dec_from_triple(0,str(coeff),exp)\n \n \n \n context=context._shallow_copy()\n rounding=context._set_rounding(ROUND_HALF_EVEN)\n ans=ans._fix(context)\n context.rounding=rounding\n \n return ans\n \n def is_canonical(self):\n ''\n\n\n\n \n return True\n \n def is_finite(self):\n ''\n\n\n\n \n return not self._is_special\n \n def is_infinite(self):\n ''\n return self._exp =='F'\n \n def is_nan(self):\n ''\n return self._exp in ('n','N')\n \n def is_normal(self,context=None ):\n ''\n if self._is_special or not self:\n return False\n if context is None :\n context=getcontext()\n return context.Emin <=self.adjusted()\n \n def is_qnan(self):\n ''\n return self._exp =='n'\n \n def is_signed(self):\n ''\n return self._sign ==1\n \n def is_snan(self):\n ''\n return self._exp =='N'\n \n def is_subnormal(self,context=None ):\n ''\n if self._is_special or not self:\n return False\n if context is None :\n context=getcontext()\n return self.adjusted()=1:\n \n return len(str(adj *23 //10))-1\n if adj <=-2:\n \n return len(str((-1 -adj)*23 //10))-1\n op=_WorkRep(self)\n c,e=op.int,op.exp\n if adj ==0:\n \n num=str(c -10 **-e)\n den=str(c)\n return len(num)-len(den)-(num =1:\n \n 
return len(str(adj))-1\n if adj <=-2:\n \n return len(str(-1 -adj))-1\n op=_WorkRep(self)\n c,e=op.int,op.exp\n if adj ==0:\n \n num=str(c -10 **-e)\n den=str(231 *c)\n return len(num)-len(den)-(num 0:\n opa='0'*dif+opa\n elif dif <0:\n opa=opa[-context.prec:]\n dif=context.prec -len(opb)\n if dif >0:\n opb='0'*dif+opb\n elif dif <0:\n opb=opb[-context.prec:]\n return opa,opb\n \n def logical_and(self,other,context=None ):\n ''\n if context is None :\n context=getcontext()\n \n other=_convert_other(other,raiseit=True )\n \n if not self._islogical()or not other._islogical():\n return context._raise_error(InvalidOperation)\n \n \n (opa,opb)=self._fill_logical(context,self._int,other._int)\n \n \n result=\"\".join([str(int(a)&int(b))for a,b in zip(opa,opb)])\n return _dec_from_triple(0,result.lstrip('0')or'0',0)\n \n def logical_invert(self,context=None ):\n ''\n if context is None :\n context=getcontext()\n return self.logical_xor(_dec_from_triple(0,'1'*context.prec,0),\n context)\n \n def logical_or(self,other,context=None ):\n ''\n if context is None :\n context=getcontext()\n \n other=_convert_other(other,raiseit=True )\n \n if not self._islogical()or not other._islogical():\n return context._raise_error(InvalidOperation)\n \n \n (opa,opb)=self._fill_logical(context,self._int,other._int)\n \n \n result=\"\".join([str(int(a)|int(b))for a,b in zip(opa,opb)])\n return _dec_from_triple(0,result.lstrip('0')or'0',0)\n \n def logical_xor(self,other,context=None ):\n ''\n if context is None :\n context=getcontext()\n \n other=_convert_other(other,raiseit=True )\n \n if not self._islogical()or not other._islogical():\n return context._raise_error(InvalidOperation)\n \n \n (opa,opb)=self._fill_logical(context,self._int,other._int)\n \n \n result=\"\".join([str(int(a)^int(b))for a,b in zip(opa,opb)])\n return _dec_from_triple(0,result.lstrip('0')or'0',0)\n \n def max_mag(self,other,context=None ):\n ''\n other=_convert_other(other,raiseit=True )\n \n if context is None :\n context=getcontext()\n \n if self._is_special or other._is_special:\n \n \n sn=self._isnan()\n on=other._isnan()\n if sn or on:\n if on ==1 and sn ==0:\n return self._fix(context)\n if sn ==1 and on ==0:\n return other._fix(context)\n return self._check_nans(other,context)\n \n c=self.copy_abs()._cmp(other.copy_abs())\n if c ==0:\n c=self.compare_total(other)\n \n if c ==-1:\n ans=other\n else :\n ans=self\n \n return ans._fix(context)\n \n def min_mag(self,other,context=None ):\n ''\n other=_convert_other(other,raiseit=True )\n \n if context is None :\n context=getcontext()\n \n if self._is_special or other._is_special:\n \n \n sn=self._isnan()\n on=other._isnan()\n if sn or on:\n if on ==1 and sn ==0:\n return self._fix(context)\n if sn ==1 and on ==0:\n return other._fix(context)\n return self._check_nans(other,context)\n \n c=self.copy_abs()._cmp(other.copy_abs())\n if c ==0:\n c=self.compare_total(other)\n \n if c ==-1:\n ans=self\n else :\n ans=other\n \n return ans._fix(context)\n \n def next_minus(self,context=None ):\n ''\n if context is None :\n context=getcontext()\n \n ans=self._check_nans(context=context)\n if ans:\n return ans\n \n if self._isinfinity()==-1:\n return _NegativeInfinity\n if self._isinfinity()==1:\n return _dec_from_triple(0,'9'*context.prec,context.Etop())\n \n context=context.copy()\n context._set_rounding(ROUND_FLOOR)\n context._ignore_all_flags()\n new_self=self._fix(context)\n if new_self !=self:\n return new_self\n return self.__sub__(_dec_from_triple(0,'1',context.Etiny()-1),\n context)\n \n def 
next_plus(self,context=None ):\n ''\n if context is None :\n context=getcontext()\n \n ans=self._check_nans(context=context)\n if ans:\n return ans\n \n if self._isinfinity()==1:\n return _Infinity\n if self._isinfinity()==-1:\n return _dec_from_triple(1,'9'*context.prec,context.Etop())\n \n context=context.copy()\n context._set_rounding(ROUND_CEILING)\n context._ignore_all_flags()\n new_self=self._fix(context)\n if new_self !=self:\n return new_self\n return self.__add__(_dec_from_triple(0,'1',context.Etiny()-1),\n context)\n \n def next_toward(self,other,context=None ):\n ''\n\n\n\n\n\n\n \n other=_convert_other(other,raiseit=True )\n \n if context is None :\n context=getcontext()\n \n ans=self._check_nans(other,context)\n if ans:\n return ans\n \n comparison=self._cmp(other)\n if comparison ==0:\n return self.copy_sign(other)\n \n if comparison ==-1:\n ans=self.next_plus(context)\n else :\n ans=self.next_minus(context)\n \n \n if ans._isinfinity():\n context._raise_error(Overflow,\n 'Infinite result from next_toward',\n ans._sign)\n context._raise_error(Inexact)\n context._raise_error(Rounded)\n elif ans.adjusted()0:\n rotdig='0'*topad+rotdig\n elif topad <0:\n rotdig=rotdig[-topad:]\n \n \n rotated=rotdig[torot:]+rotdig[:torot]\n return _dec_from_triple(self._sign,\n rotated.lstrip('0')or'0',self._exp)\n \n def scaleb(self,other,context=None ):\n ''\n if context is None :\n context=getcontext()\n \n other=_convert_other(other,raiseit=True )\n \n ans=self._check_nans(other,context)\n if ans:\n return ans\n \n if other._exp !=0:\n return context._raise_error(InvalidOperation)\n liminf=-2 *(context.Emax+context.prec)\n limsup=2 *(context.Emax+context.prec)\n if not (liminf <=int(other)<=limsup):\n return context._raise_error(InvalidOperation)\n \n if self._isinfinity():\n return Decimal(self)\n \n d=_dec_from_triple(self._sign,self._int,self._exp+int(other))\n d=d._fix(context)\n return d\n \n def shift(self,other,context=None ):\n ''\n if context is None :\n context=getcontext()\n \n other=_convert_other(other,raiseit=True )\n \n ans=self._check_nans(other,context)\n if ans:\n return ans\n \n if other._exp !=0:\n return context._raise_error(InvalidOperation)\n if not (-context.prec <=int(other)<=context.prec):\n return context._raise_error(InvalidOperation)\n \n if self._isinfinity():\n return Decimal(self)\n \n \n torot=int(other)\n rotdig=self._int\n topad=context.prec -len(rotdig)\n if topad >0:\n rotdig='0'*topad+rotdig\n elif topad <0:\n rotdig=rotdig[-topad:]\n \n \n if torot <0:\n shifted=rotdig[:torot]\n else :\n shifted=rotdig+'0'*torot\n shifted=shifted[-context.prec:]\n \n return _dec_from_triple(self._sign,\n shifted.lstrip('0')or'0',self._exp)\n \n \n def __reduce__(self):\n return (self.__class__,(str(self),))\n \n def __copy__(self):\n if type(self)is Decimal:\n return self\n return self.__class__(str(self))\n \n def __deepcopy__(self,memo):\n if type(self)is Decimal:\n return self\n return self.__class__(str(self))\n \n \n \n def __format__(self,specifier,context=None ,_localeconv=None ):\n ''\n\n\n\n\n\n\n \n \n \n \n \n \n \n if context is None :\n context=getcontext()\n \n spec=_parse_format_specifier(specifier,_localeconv=_localeconv)\n \n \n if self._is_special:\n sign=_format_sign(self._sign,spec)\n body=str(self.copy_abs())\n return _format_align(sign,body,spec)\n \n \n if spec['type']is None :\n spec['type']=['g','G'][context.capitals]\n \n \n if spec['type']=='%':\n self=_dec_from_triple(self._sign,self._int,self._exp+2)\n \n \n rounding=context.rounding\n 
precision=spec['precision']\n if precision is not None :\n if spec['type']in'eE':\n self=self._round(precision+1,rounding)\n elif spec['type']in'fF%':\n self=self._rescale(-precision,rounding)\n elif spec['type']in'gG'and len(self._int)>precision:\n self=self._round(precision,rounding)\n \n \n if not self and self._exp >0 and spec['type']in'fF%':\n self=self._rescale(0,rounding)\n \n \n leftdigits=self._exp+len(self._int)\n if spec['type']in'eE':\n if not self and precision is not None :\n dotplace=1 -precision\n else :\n dotplace=1\n elif spec['type']in'fF%':\n dotplace=leftdigits\n elif spec['type']in'gG':\n if self._exp <=0 and leftdigits >-6:\n dotplace=leftdigits\n else :\n dotplace=1\n \n \n if dotplace <0:\n intpart='0'\n fracpart='0'*(-dotplace)+self._int\n elif dotplace >len(self._int):\n intpart=self._int+'0'*(dotplace -len(self._int))\n fracpart=''\n else :\n intpart=self._int[:dotplace]or'0'\n fracpart=self._int[dotplace:]\n exp=leftdigits -dotplace\n \n \n \n return _format_number(self._sign,intpart,fracpart,exp,spec)\n \ndef _dec_from_triple(sign,coefficient,exponent,special=False ):\n ''\n\n\n\n\n \n \n self=object.__new__(Decimal)\n self._sign=sign\n self._int=coefficient\n self._exp=exponent\n self._is_special=special\n \n return self\n \n \n \n \n_numbers.Number.register(Decimal)\n\n\n\n\nclass _ContextManager(object):\n ''\n\n\n\n \n def __init__(self,new_context):\n self.new_context=new_context.copy()\n def __enter__(self):\n self.saved_context=getcontext()\n setcontext(self.new_context)\n return self.new_context\n def __exit__(self,t,v,tb):\n setcontext(self.saved_context)\n \nclass Context(object):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n def __init__(self,prec=None ,rounding=None ,Emin=None ,Emax=None ,\n capitals=None ,clamp=None ,flags=None ,traps=None ,\n _ignored_flags=None ):\n \n \n try :\n dc=DefaultContext\n except NameError:\n pass\n \n self.prec=prec if prec is not None else dc.prec\n self.rounding=rounding if rounding is not None else dc.rounding\n self.Emin=Emin if Emin is not None else dc.Emin\n self.Emax=Emax if Emax is not None else dc.Emax\n self.capitals=capitals if capitals is not None else dc.capitals\n self.clamp=clamp if clamp is not None else dc.clamp\n \n if _ignored_flags is None :\n self._ignored_flags=[]\n else :\n self._ignored_flags=_ignored_flags\n \n if traps is None :\n self.traps=dc.traps.copy()\n elif not isinstance(traps,dict):\n self.traps=dict((s,int(s in traps))for s in _signals+traps)\n else :\n self.traps=traps\n \n if flags is None :\n self.flags=dict.fromkeys(_signals,0)\n elif not isinstance(flags,dict):\n self.flags=dict((s,int(s in flags))for s in _signals+flags)\n else :\n self.flags=flags\n \n def _set_integer_check(self,name,value,vmin,vmax):\n if not isinstance(value,int):\n raise TypeError(\"%s must be an integer\"%name)\n if vmin =='-inf':\n if value >vmax:\n raise ValueError(\"%s must be in [%s, %d]. got: %s\"%(name,vmin,vmax,value))\n elif vmax =='inf':\n if value vmax:\n raise ValueError(\"%s must be in [%d, %d]. 
got %s\"%(name,vmin,vmax,value))\n return object.__setattr__(self,name,value)\n \n def _set_signal_dict(self,name,d):\n if not isinstance(d,dict):\n raise TypeError(\"%s must be a signal dict\"%d)\n for key in d:\n if not key in _signals:\n raise KeyError(\"%s is not a valid signal dict\"%d)\n for key in _signals:\n if not key in d:\n raise KeyError(\"%s is not a valid signal dict\"%d)\n return object.__setattr__(self,name,d)\n \n def __setattr__(self,name,value):\n if name =='prec':\n return self._set_integer_check(name,value,1,'inf')\n elif name =='Emin':\n return self._set_integer_check(name,value,'-inf',0)\n elif name =='Emax':\n return self._set_integer_check(name,value,0,'inf')\n elif name =='capitals':\n return self._set_integer_check(name,value,0,1)\n elif name =='clamp':\n return self._set_integer_check(name,value,0,1)\n elif name =='rounding':\n if not value in _rounding_modes:\n \n \n raise TypeError(\"%s: invalid rounding mode\"%value)\n return object.__setattr__(self,name,value)\n elif name =='flags'or name =='traps':\n return self._set_signal_dict(name,value)\n elif name =='_ignored_flags':\n return object.__setattr__(self,name,value)\n else :\n raise AttributeError(\n \"'decimal.Context' object has no attribute '%s'\"%name)\n \n def __delattr__(self,name):\n raise AttributeError(\"%s cannot be deleted\"%name)\n \n \n def __reduce__(self):\n flags=[sig for sig,v in self.flags.items()if v]\n traps=[sig for sig,v in self.traps.items()if v]\n return (self.__class__,\n (self.prec,self.rounding,self.Emin,self.Emax,\n self.capitals,self.clamp,flags,traps))\n \n def __repr__(self):\n ''\n s=[]\n s.append('Context(prec=%(prec)d, rounding=%(rounding)s, '\n 'Emin=%(Emin)d, Emax=%(Emax)d, capitals=%(capitals)d, '\n 'clamp=%(clamp)d'\n %vars(self))\n names=[f.__name__ for f,v in self.flags.items()if v]\n s.append('flags=['+', '.join(names)+']')\n names=[t.__name__ for t,v in self.traps.items()if v]\n s.append('traps=['+', '.join(names)+']')\n return', '.join(s)+')'\n \n def clear_flags(self):\n ''\n for flag in self.flags:\n self.flags[flag]=0\n \n def clear_traps(self):\n ''\n for flag in self.traps:\n self.traps[flag]=0\n \n def _shallow_copy(self):\n ''\n nc=Context(self.prec,self.rounding,self.Emin,self.Emax,\n self.capitals,self.clamp,self.flags,self.traps,\n self._ignored_flags)\n return nc\n \n def copy(self):\n ''\n nc=Context(self.prec,self.rounding,self.Emin,self.Emax,\n self.capitals,self.clamp,\n self.flags.copy(),self.traps.copy(),\n self._ignored_flags)\n return nc\n __copy__=copy\n \n def _raise_error(self,condition,explanation=None ,*args):\n ''\n\n\n\n\n\n \n error=_condition_map.get(condition,condition)\n if error in self._ignored_flags:\n \n return error().handle(self,*args)\n \n self.flags[error]=1\n if not self.traps[error]:\n \n return condition().handle(self,*args)\n \n \n \n raise error(explanation)\n \n def _ignore_all_flags(self):\n ''\n return self._ignore_flags(*_signals)\n \n def _ignore_flags(self,*flags):\n ''\n \n \n self._ignored_flags=(self._ignored_flags+list(flags))\n return list(flags)\n \n def _regard_flags(self,*flags):\n ''\n if flags and isinstance(flags[0],(tuple,list)):\n flags=flags[0]\n for flag in flags:\n self._ignored_flags.remove(flag)\n \n \n __hash__=None\n \n def Etiny(self):\n ''\n return int(self.Emin -self.prec+1)\n \n def Etop(self):\n ''\n return int(self.Emax -self.prec+1)\n \n def _set_rounding(self,type):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n \n rounding=self.rounding\n self.rounding=type\n return rounding\n \n def 
create_decimal(self,num='0'):\n ''\n\n\n \n \n if isinstance(num,str)and num !=num.strip():\n return self._raise_error(ConversionSyntax,\n \"no trailing or leading whitespace is \"\n \"permitted.\")\n \n d=Decimal(num,context=self)\n if d._isnan()and len(d._int)>self.prec -self.clamp:\n return self._raise_error(ConversionSyntax,\n \"diagnostic info too long in NaN\")\n return d._fix(self)\n \n def create_decimal_from_float(self,f):\n ''\n\n\n\n\n\n\n\n\n\n\n\n \n d=Decimal.from_float(f)\n return d._fix(self)\n \n \n def abs(self,a):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True )\n return a.__abs__(context=self)\n \n def add(self,a,b):\n ''\n\n\n\n\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True )\n r=a.__add__(b,context=self)\n if r is NotImplemented:\n raise TypeError(\"Unable to convert %s to Decimal\"%b)\n else :\n return r\n \n def _apply(self,a):\n return str(a._fix(self))\n \n def canonical(self,a):\n ''\n\n\n\n\n\n\n \n if not isinstance(a,Decimal):\n raise TypeError(\"canonical requires a Decimal as an argument.\")\n return a.canonical()\n \n def compare(self,a,b):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True )\n return a.compare(b,context=self)\n \n def compare_signal(self,a,b):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True )\n return a.compare_signal(b,context=self)\n \n def compare_total(self,a,b):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True )\n return a.compare_total(b)\n \n def compare_total_mag(self,a,b):\n ''\n\n\n \n a=_convert_other(a,raiseit=True )\n return a.compare_total_mag(b)\n \n def copy_abs(self,a):\n ''\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True )\n return a.copy_abs()\n \n def copy_decimal(self,a):\n ''\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True )\n return Decimal(a)\n \n def copy_negate(self,a):\n ''\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True )\n return a.copy_negate()\n \n def copy_sign(self,a,b):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True )\n return a.copy_sign(b)\n \n def divide(self,a,b):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True )\n r=a.__truediv__(b,context=self)\n if r is NotImplemented:\n raise TypeError(\"Unable to convert %s to Decimal\"%b)\n else :\n return r\n \n def divide_int(self,a,b):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True )\n r=a.__floordiv__(b,context=self)\n if r is NotImplemented:\n raise TypeError(\"Unable to convert %s to Decimal\"%b)\n else :\n return r\n \n def divmod(self,a,b):\n ''\n\n\n\n\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True )\n r=a.__divmod__(b,context=self)\n if r is NotImplemented:\n raise TypeError(\"Unable to convert %s to Decimal\"%b)\n else :\n return r\n \n def exp(self,a):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True )\n return a.exp(context=self)\n \n def fma(self,a,b,c):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True )\n return a.fma(b,c,context=self)\n \n def is_canonical(self,a):\n ''\n\n\n\n\n\n\n \n if not isinstance(a,Decimal):\n raise TypeError(\"is_canonical requires a Decimal as an argument.\")\n return a.is_canonical()\n \n def is_finite(self,a):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True )\n return a.is_finite()\n \n def is_infinite(self,a):\n ''\n\n\n\n\n\n\n\n\n\n \n 
a=_convert_other(a,raiseit=True )\n return a.is_infinite()\n \n def is_nan(self,a):\n ''\n\n\n\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True )\n return a.is_nan()\n \n def is_normal(self,a):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True )\n return a.is_normal(context=self)\n \n def is_qnan(self,a):\n ''\n\n\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True )\n return a.is_qnan()\n \n def is_signed(self,a):\n ''\n\n\n\n\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True )\n return a.is_signed()\n \n def is_snan(self,a):\n ''\n\n\n\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True )\n return a.is_snan()\n \n def is_subnormal(self,a):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True )\n return a.is_subnormal(context=self)\n \n def is_zero(self,a):\n ''\n\n\n\n\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True )\n return a.is_zero()\n \n def ln(self,a):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True )\n return a.ln(context=self)\n \n def log10(self,a):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True )\n return a.log10(context=self)\n \n def logb(self,a):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True )\n return a.logb(context=self)\n \n def logical_and(self,a,b):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True )\n return a.logical_and(b,context=self)\n \n def logical_invert(self,a):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True )\n return a.logical_invert(context=self)\n \n def logical_or(self,a,b):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True )\n return a.logical_or(b,context=self)\n \n def logical_xor(self,a,b):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True )\n return a.logical_xor(b,context=self)\n \n def max(self,a,b):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True )\n return a.max(b,context=self)\n \n def max_mag(self,a,b):\n ''\n\n\n\n\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True )\n return a.max_mag(b,context=self)\n \n def min(self,a,b):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True )\n return a.min(b,context=self)\n \n def min_mag(self,a,b):\n ''\n\n\n\n\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True )\n return a.min_mag(b,context=self)\n \n def minus(self,a):\n ''\n\n\n\n\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True )\n return a.__neg__(context=self)\n \n def multiply(self,a,b):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True )\n r=a.__mul__(b,context=self)\n if r is NotImplemented:\n raise TypeError(\"Unable to convert %s to Decimal\"%b)\n else :\n return r\n \n def next_minus(self,a):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True )\n return a.next_minus(context=self)\n \n def next_plus(self,a):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True )\n return a.next_plus(context=self)\n \n def next_toward(self,a,b):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True )\n return a.next_toward(b,context=self)\n \n def normalize(self,a):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True )\n return a.normalize(context=self)\n \n def number_class(self,a):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n 
a=_convert_other(a,raiseit=True )\n return a.number_class(context=self)\n \n def plus(self,a):\n ''\n\n\n\n\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True )\n return a.__pos__(context=self)\n \n def power(self,a,b,modulo=None ):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True )\n r=a.__pow__(b,modulo,context=self)\n if r is NotImplemented:\n raise TypeError(\"Unable to convert %s to Decimal\"%b)\n else :\n return r\n \n def quantize(self,a,b):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True )\n return a.quantize(b,context=self)\n \n def radix(self):\n ''\n\n\n\n \n return Decimal(10)\n \n def remainder(self,a,b):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True )\n r=a.__mod__(b,context=self)\n if r is NotImplemented:\n raise TypeError(\"Unable to convert %s to Decimal\"%b)\n else :\n return r\n \n def remainder_near(self,a,b):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True )\n return a.remainder_near(b,context=self)\n \n def rotate(self,a,b):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True )\n return a.rotate(b,context=self)\n \n def same_quantum(self,a,b):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True )\n return a.same_quantum(b)\n \n def scaleb(self,a,b):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True )\n return a.scaleb(b,context=self)\n \n def shift(self,a,b):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True )\n return a.shift(b,context=self)\n \n def sqrt(self,a):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True )\n return a.sqrt(context=self)\n \n def subtract(self,a,b):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True )\n r=a.__sub__(b,context=self)\n if r is NotImplemented:\n raise TypeError(\"Unable to convert %s to Decimal\"%b)\n else :\n return r\n \n def to_eng_string(self,a):\n ''\n\n\n \n a=_convert_other(a,raiseit=True )\n return a.to_eng_string(context=self)\n \n def to_sci_string(self,a):\n ''\n\n\n \n a=_convert_other(a,raiseit=True )\n return a.__str__(context=self)\n \n def to_integral_exact(self,a):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True )\n return a.to_integral_exact(context=self)\n \n def to_integral_value(self,a):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n a=_convert_other(a,raiseit=True )\n return a.to_integral_value(context=self)\n \n \n to_integral=to_integral_value\n \nclass _WorkRep(object):\n __slots__=('sign','int','exp')\n \n \n \n \n def __init__(self,value=None ):\n if value is None :\n self.sign=None\n self.int=0\n self.exp=None\n elif isinstance(value,Decimal):\n self.sign=value._sign\n self.int=int(value._int)\n self.exp=value._exp\n else :\n \n self.sign=value[0]\n self.int=value[1]\n self.exp=value[2]\n \n def __repr__(self):\n return\"(%r, %r, %r)\"%(self.sign,self.int,self.exp)\n \n __str__=__repr__\n \n \n \ndef _normalize(op1,op2,prec=0):\n ''\n\n\n \n if op1.exp =0:\n return n *10 **e\n else :\n \n str_n=str(abs(n))\n val_n=len(str_n)-len(str_n.rstrip('0'))\n return None if val_n <-e else n //10 **-e\n \ndef _sqrt_nearest(n,a):\n ''\n\n\n\n\n \n if n <=0 or a 
<=0:\n raise ValueError(\"Both arguments to _sqrt_nearest should be positive.\")\n \n b=0\n while a !=b:\n b,a=a,a --n //a >>1\n return a\n \ndef _rshift_nearest(x,shift):\n ''\n\n\n \n b,q=1 <>shift\n return q+(2 *(x&(b -1))+(q&1)>b)\n \ndef _div_nearest(a,b):\n ''\n\n\n \n q,r=divmod(a,b)\n return q+(2 *r+(q&1)>b)\n \ndef _ilog(x,M,L=8):\n ''\n\n\n\n\n\n\n\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n y=x -M\n \n R=0\n while (R <=L and abs(y)<=M or\n R >L and abs(y)>>R -L >=M):\n y=_div_nearest((M *y)<<1,\n M+_sqrt_nearest(M *(M+_rshift_nearest(y,R)),M))\n R +=1\n \n \n T=-int(-10 *len(str(M))//(3 *L))\n yshift=_rshift_nearest(y,R)\n w=_div_nearest(M,T)\n for k in range(T -1,0,-1):\n w=_div_nearest(M,k)-_div_nearest(yshift *w,M)\n \n return _div_nearest(w *y,M)\n \ndef _dlog10(c,e,p):\n ''\n\n \n \n \n \n p +=2\n \n \n \n \n \n l=len(str(c))\n f=e+l -(e+l >=1)\n \n if p >0:\n M=10 **p\n k=e+p -f\n if k >=0:\n c *=10 **k\n else :\n c=_div_nearest(c,10 **-k)\n \n log_d=_ilog(c,M)\n log_10=_log10_digits(p)\n log_d=_div_nearest(log_d *M,log_10)\n log_tenpower=f *M\n else :\n log_d=0\n log_tenpower=_div_nearest(f,10 **-p)\n \n return _div_nearest(log_tenpower+log_d,100)\n \ndef _dlog(c,e,p):\n ''\n\n \n \n \n \n p +=2\n \n \n \n \n l=len(str(c))\n f=e+l -(e+l >=1)\n \n \n if p >0:\n k=e+p -f\n if k >=0:\n c *=10 **k\n else :\n c=_div_nearest(c,10 **-k)\n \n \n log_d=_ilog(c,10 **p)\n else :\n \n log_d=0\n \n \n if f:\n extra=len(str(abs(f)))-1\n if p+extra >=0:\n \n \n f_log_ten=_div_nearest(f *_log10_digits(p+extra),10 **extra)\n else :\n f_log_ten=0\n else :\n f_log_ten=0\n \n \n return _div_nearest(f_log_ten+log_d,100)\n \nclass _Log10Memoize(object):\n ''\n\n \n def __init__(self):\n self.digits=\"23025850929940456840179914546843642076011014886\"\n \n def getdigits(self,p):\n ''\n\n\n \n \n \n \n \n if p <0:\n raise ValueError(\"p should be nonnegative\")\n \n if p >=len(self.digits):\n \n \n extra=3\n while True :\n \n M=10 **(p+extra+2)\n digits=str(_div_nearest(_ilog(10 *M,M),100))\n if digits[-extra:]!='0'*extra:\n break\n extra +=3\n \n \n self.digits=digits.rstrip('0')[:-1]\n return int(self.digits[:p+1])\n \n_log10_digits=_Log10Memoize().getdigits\n\ndef _iexp(x,M,L=8):\n ''\n\n\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n R=_nbits((x <=0:\n cshift=c *10 **shift\n else :\n cshift=c //10 **-shift\n quot,rem=divmod(cshift,_log10_digits(q))\n \n \n rem=_div_nearest(rem,10 **extra)\n \n \n return _div_nearest(_iexp(rem,10 **p),1000),quot -p+3\n \ndef _dpower(xc,xe,yc,ye,p):\n ''\n\n\n\n\n\n\n\n\n\n\n\n \n \n \n b=len(str(abs(yc)))+ye\n \n \n lxc=_dlog(xc,xe,p+b+1)\n \n \n shift=ye -b\n if shift >=0:\n pc=lxc *yc *10 **shift\n else :\n pc=_div_nearest(lxc *yc,10 **-shift)\n \n if pc ==0:\n \n \n if ((len(str(xc))+xe >=1)==(yc >0)):\n coeff,exp=10 **(p -1)+1,1 -p\n else :\n coeff,exp=10 **p -1,-p\n else :\n coeff,exp=_dexp(pc,-(p+1),p+1)\n coeff=_div_nearest(coeff,10)\n exp +=1\n \n return coeff,exp\n \ndef _log10_lb(c,correction={\n'1':100,'2':70,'3':53,'4':40,'5':31,\n'6':23,'7':16,'8':10,'9':5}):\n ''\n if c <=0:\n raise ValueError(\"The argument to _log10_lb should be nonnegative.\")\n str_c=str(c)\n return 100 *len(str_c)-correction[str_c[0]]\n \n \n \ndef _convert_other(other,raiseit=False ,allow_float=False ):\n ''\n\n\n\n\n\n \n if isinstance(other,Decimal):\n return other\n if isinstance(other,int):\n return Decimal(other)\n if allow_float and isinstance(other,float):\n return Decimal.from_float(other)\n \n if raiseit:\n raise TypeError(\"Unable to 
convert %s to Decimal\"%other)\n return NotImplemented\n \ndef _convert_for_comparison(self,other,equality_op=False ):\n ''\n\n\n\n\n \n if isinstance(other,Decimal):\n return self,other\n \n \n \n \n \n if isinstance(other,_numbers.Rational):\n if not self._is_special:\n self=_dec_from_triple(self._sign,\n str(int(self._int)*other.denominator),\n self._exp)\n return self,Decimal(other.numerator)\n \n \n \n \n if equality_op and isinstance(other,_numbers.Complex)and other.imag ==0:\n other=other.real\n if isinstance(other,float):\n context=getcontext()\n if equality_op:\n context.flags[FloatOperation]=1\n else :\n context._raise_error(FloatOperation,\n \"strict semantics for mixing floats and Decimals are enabled\")\n return self,Decimal.from_float(other)\n return NotImplemented,NotImplemented\n \n \n \n \n \n \n \nDefaultContext=Context(\nprec=17,rounding=ROUND_HALF_EVEN,\ntraps=[DivisionByZero,Overflow,InvalidOperation],\nflags=[],\nEmax=308,\nEmin=-324,\ncapitals=1,\nclamp=0\n)\n\n\n\n\n\n\nBasicContext=Context(\nprec=9,rounding=ROUND_HALF_UP,\ntraps=[DivisionByZero,Overflow,InvalidOperation,Clamped,Underflow],\nflags=[],\n)\n\nExtendedContext=Context(\nprec=9,rounding=ROUND_HALF_EVEN,\ntraps=[],\nflags=[],\n)\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n_all_zeros=re.compile('0*$').match\n_exact_half=re.compile('50*$').match\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\ndel re\n\n\n\n\ntry :\n import locale as _locale\nexcept ImportError:\n pass\n \ndef _parse_format_specifier(format_spec,_localeconv=None ):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n m=_parse_format_specifier_regex.match(format_spec)\n if m is None :\n raise ValueError(\"Invalid format specifier: \"+format_spec)\n \n \n format_dict=m.groupdict()\n \n \n \n fill=format_dict['fill']\n align=format_dict['align']\n format_dict['zeropad']=(format_dict['zeropad']is not None )\n if format_dict['zeropad']:\n if fill is not None :\n raise ValueError(\"Fill character conflicts with '0'\"\n \" in format specifier: \"+format_spec)\n if align is not None :\n raise ValueError(\"Alignment conflicts with '0' in \"\n \"format specifier: \"+format_spec)\n format_dict['fill']=fill or' '\n \n \n \n format_dict['align']=align or'>'\n \n \n if format_dict['sign']is None :\n format_dict['sign']='-'\n \n \n format_dict['minimumwidth']=int(format_dict['minimumwidth']or'0')\n if format_dict['precision']is not None :\n format_dict['precision']=int(format_dict['precision'])\n \n \n \n if format_dict['precision']==0:\n if format_dict['type']is None or format_dict['type']in'gGn':\n format_dict['precision']=1\n \n \n \n if format_dict['type']=='n':\n \n format_dict['type']='g'\n if _localeconv is None :\n _localeconv=_locale.localeconv()\n if format_dict['thousands_sep']is not None :\n raise ValueError(\"Explicit thousands separator conflicts with \"\n \"'n' type in format specifier: \"+format_spec)\n format_dict['thousands_sep']=_localeconv['thousands_sep']\n format_dict['grouping']=_localeconv['grouping']\n format_dict['decimal_point']=_localeconv['decimal_point']\n else :\n if format_dict['thousands_sep']is None :\n format_dict['thousands_sep']=''\n format_dict['grouping']=[3,0]\n format_dict['decimal_point']='.'\n \n return format_dict\n \ndef _format_align(sign,body,spec):\n ''\n\n\n\n\n \n \n minimumwidth=spec['minimumwidth']\n fill=spec['fill']\n padding=fill *(minimumwidth -len(sign)-len(body))\n \n align=spec['align']\n if align =='<':\n result=sign+body+padding\n elif align =='>':\n 
result=padding+sign+body\n elif align =='=':\n result=sign+padding+body\n elif align =='^':\n half=len(padding)//2\n result=padding[:half]+sign+body+padding[half:]\n else :\n raise ValueError('Unrecognised alignment field')\n \n return result\n \ndef _group_lengths(grouping):\n ''\n\n\n \n \n \n \n \n \n \n \n \n from itertools import chain,repeat\n if not grouping:\n return []\n elif grouping[-1]==0 and len(grouping)>=2:\n return chain(grouping[:-1],repeat(grouping[-2]))\n elif grouping[-1]==_locale.CHAR_MAX:\n return grouping[:-1]\n else :\n raise ValueError('unrecognised format for grouping')\n \ndef _insert_thousands_sep(digits,spec,min_width=1):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n sep=spec['thousands_sep']\n grouping=spec['grouping']\n \n groups=[]\n for l in _group_lengths(grouping):\n if l <=0:\n raise ValueError(\"group length should be positive\")\n \n l=min(max(len(digits),min_width,1),l)\n groups.append('0'*(l -len(digits))+digits[-l:])\n digits=digits[:-l]\n min_width -=l\n if not digits and min_width <=0:\n break\n min_width -=len(sep)\n else :\n l=max(len(digits),min_width,1)\n groups.append('0'*(l -len(digits))+digits[-l:])\n return sep.join(reversed(groups))\n \ndef _format_sign(is_negative,spec):\n ''\n \n if is_negative:\n return'-'\n elif spec['sign']in' +':\n return spec['sign']\n else :\n return''\n \ndef _format_number(is_negative,intpart,fracpart,exp,spec):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n sign=_format_sign(is_negative,spec)\n \n if fracpart or spec['alt']:\n fracpart=spec['decimal_point']+fracpart\n \n if exp !=0 or spec['type']in'eE':\n echar={'E':'E','e':'e','G':'E','g':'e'}[spec['type']]\n fracpart +=\"{0}{1:+}\".format(echar,exp)\n if spec['type']=='%':\n fracpart +='%'\n \n if spec['zeropad']:\n min_width=spec['minimumwidth']-len(fracpart)-len(sign)\n else :\n min_width=0\n intpart=_insert_thousands_sep(intpart,spec,min_width)\n \n return _format_align(sign,intpart+fracpart,spec)\n \n \n \n \n \n_Infinity=Decimal('Inf')\n_NegativeInfinity=Decimal('-Inf')\n_NaN=Decimal('NaN')\n_Zero=Decimal(0)\n_One=Decimal(1)\n_NegativeOne=Decimal(-1)\n\n\n_SignedInfinity=(_Infinity,_NegativeInfinity)\n\n\n\n_PyHASH_MODULUS=sys.hash_info.modulus\n\n_PyHASH_INF=sys.hash_info.inf\n_PyHASH_NAN=sys.hash_info.nan\n\n\n_PyHASH_10INV=pow(10,_PyHASH_MODULUS -2,_PyHASH_MODULUS)\ndel sys\n\ntry :\n import _decimal\nexcept ImportError:\n pass\nelse :\n s1=set(dir())\n s2=set(dir(_decimal))\n for name in s1 -s2:\n del globals()[name]\n del s1,s2,name\n from _decimal import *\n \n \n \n \n \n \n"], "encodings.hp_roman8": [".py", "''\n\n\n\n\n\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_table)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False ):\n return codecs.charmap_encode(input,self.errors,encoding_table)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False ):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='hp-roman8',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamwriter=StreamWriter,\n 
streamreader=StreamReader,\n )\n \n \n \n \ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' '\n'!'\n'\"'\n'#'\n'$'\n'%'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\x80'\n'\\x81'\n'\\x82'\n'\\x83'\n'\\x84'\n'\\x85'\n'\\x86'\n'\\x87'\n'\\x88'\n'\\x89'\n'\\x8a'\n'\\x8b'\n'\\x8c'\n'\\x8d'\n'\\x8e'\n'\\x8f'\n'\\x90'\n'\\x91'\n'\\x92'\n'\\x93'\n'\\x94'\n'\\x95'\n'\\x96'\n'\\x97'\n'\\x98'\n'\\x99'\n'\\x9a'\n'\\x9b'\n'\\x9c'\n'\\x9d'\n'\\x9e'\n'\\x9f'\n'\\xa0'\n'\\xc0'\n'\\xc2'\n'\\xc8'\n'\\xca'\n'\\xcb'\n'\\xce'\n'\\xcf'\n'\\xb4'\n'\\u02cb'\n'\\u02c6'\n'\\xa8'\n'\\u02dc'\n'\\xd9'\n'\\xdb'\n'\\u20a4'\n'\\xaf'\n'\\xdd'\n'\\xfd'\n'\\xb0'\n'\\xc7'\n'\\xe7'\n'\\xd1'\n'\\xf1'\n'\\xa1'\n'\\xbf'\n'\\xa4'\n'\\xa3'\n'\\xa5'\n'\\xa7'\n'\\u0192'\n'\\xa2'\n'\\xe2'\n'\\xea'\n'\\xf4'\n'\\xfb'\n'\\xe1'\n'\\xe9'\n'\\xf3'\n'\\xfa'\n'\\xe0'\n'\\xe8'\n'\\xf2'\n'\\xf9'\n'\\xe4'\n'\\xeb'\n'\\xf6'\n'\\xfc'\n'\\xc5'\n'\\xee'\n'\\xd8'\n'\\xc6'\n'\\xe5'\n'\\xed'\n'\\xf8'\n'\\xe6'\n'\\xc4'\n'\\xec'\n'\\xd6'\n'\\xdc'\n'\\xc9'\n'\\xef'\n'\\xdf'\n'\\xd4'\n'\\xc1'\n'\\xc3'\n'\\xe3'\n'\\xd0'\n'\\xf0'\n'\\xcd'\n'\\xcc'\n'\\xd3'\n'\\xd2'\n'\\xd5'\n'\\xf5'\n'\\u0160'\n'\\u0161'\n'\\xda'\n'\\u0178'\n'\\xff'\n'\\xde'\n'\\xfe'\n'\\xb7'\n'\\xb5'\n'\\xb6'\n'\\xbe'\n'\\u2014'\n'\\xbc'\n'\\xbd'\n'\\xaa'\n'\\xba'\n'\\xab'\n'\\u25a0'\n'\\xbb'\n'\\xb1'\n'\\ufffe'\n)\n\n\nencoding_table=codecs.charmap_build(decoding_table)\n"], "logging.handlers": [".py", "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\"\"\"\nAdditional handlers for the logging package for Python. The core package is\nbased on PEP 282 and comments thereto in comp.lang.python.\n\nCopyright (C) 2001-2013 Vinay Sajip. 
All Rights Reserved.\n\nTo use, simply 'import logging.handlers' and log away!\n\"\"\"\n\nimport errno,logging,socket,os,pickle,struct,time,re\nfrom codecs import BOM_UTF8\nfrom stat import ST_DEV,ST_INO,ST_MTIME\nimport queue\ntry :\n import threading\nexcept ImportError:\n threading=None\n \n \nfrom .brython_handlers import XMLHTTPHandler\n\n\n\n\nDEFAULT_TCP_LOGGING_PORT=9020\nDEFAULT_UDP_LOGGING_PORT=9021\nDEFAULT_HTTP_LOGGING_PORT=9022\nDEFAULT_SOAP_LOGGING_PORT=9023\nSYSLOG_UDP_PORT=514\nSYSLOG_TCP_PORT=514\n\n_MIDNIGHT=24 *60 *60\n\nclass BaseRotatingHandler(logging.FileHandler):\n ''\n\n\n\n \n def __init__(self,filename,mode,encoding=None ,delay=False ):\n ''\n\n \n logging.FileHandler.__init__(self,filename,mode,encoding,delay)\n self.mode=mode\n self.encoding=encoding\n self.namer=None\n self.rotator=None\n \n def emit(self,record):\n ''\n\n\n\n\n \n try :\n if self.shouldRollover(record):\n self.doRollover()\n logging.FileHandler.emit(self,record)\n except (KeyboardInterrupt,SystemExit):\n raise\n except :\n self.handleError(record)\n \n def rotation_filename(self,default_name):\n ''\n\n\n\n\n\n\n\n\n\n\n \n if not callable(self.namer):\n result=default_name\n else :\n result=self.namer(default_name)\n return result\n \n def rotate(self,source,dest):\n ''\n\n\n\n\n\n\n\n\n\n\n\n \n if not callable(self.rotator):\n \n if os.path.exists(source):\n os.rename(source,dest)\n else :\n self.rotator(source,dest)\n \nclass RotatingFileHandler(BaseRotatingHandler):\n ''\n\n\n \n def __init__(self,filename,mode='a',maxBytes=0,backupCount=0,encoding=None ,delay=False ):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n \n \n \n \n if maxBytes >0:\n mode='a'\n BaseRotatingHandler.__init__(self,filename,mode,encoding,delay)\n self.maxBytes=maxBytes\n self.backupCount=backupCount\n \n def doRollover(self):\n ''\n\n \n if self.stream:\n self.stream.close()\n self.stream=None\n if self.backupCount >0:\n for i in range(self.backupCount -1,0,-1):\n sfn=self.rotation_filename(\"%s.%d\"%(self.baseFilename,i))\n dfn=self.rotation_filename(\"%s.%d\"%(self.baseFilename,\n i+1))\n if os.path.exists(sfn):\n if os.path.exists(dfn):\n os.remove(dfn)\n os.rename(sfn,dfn)\n dfn=self.rotation_filename(self.baseFilename+\".1\")\n if os.path.exists(dfn):\n os.remove(dfn)\n self.rotate(self.baseFilename,dfn)\n if not self.delay:\n self.stream=self._open()\n \n def shouldRollover(self,record):\n ''\n\n\n\n\n \n if self.stream is None :\n self.stream=self._open()\n if self.maxBytes >0:\n msg=\"%s\\n\"%self.format(record)\n self.stream.seek(0,2)\n if self.stream.tell()+len(msg)>=self.maxBytes:\n return 1\n return 0\n \nclass TimedRotatingFileHandler(BaseRotatingHandler):\n ''\n\n\n\n\n\n \n def __init__(self,filename,when='h',interval=1,backupCount=0,encoding=None ,delay=False ,utc=False ):\n BaseRotatingHandler.__init__(self,filename,'a',encoding,delay)\n self.when=when.upper()\n self.backupCount=backupCount\n self.utc=utc\n \n \n \n \n \n \n \n \n \n \n \n \n if self.when =='S':\n self.interval=1\n self.suffix=\"%Y-%m-%d_%H-%M-%S\"\n self.extMatch=r\"^\\d{4}-\\d{2}-\\d{2}_\\d{2}-\\d{2}-\\d{2}(\\.\\w+)?$\"\n elif self.when =='M':\n self.interval=60\n self.suffix=\"%Y-%m-%d_%H-%M\"\n self.extMatch=r\"^\\d{4}-\\d{2}-\\d{2}_\\d{2}-\\d{2}(\\.\\w+)?$\"\n elif self.when =='H':\n self.interval=60 *60\n self.suffix=\"%Y-%m-%d_%H\"\n self.extMatch=r\"^\\d{4}-\\d{2}-\\d{2}_\\d{2}(\\.\\w+)?$\"\n elif self.when =='D'or self.when =='MIDNIGHT':\n self.interval=60 *60 *24\n self.suffix=\"%Y-%m-%d\"\n 
self.extMatch=r\"^\\d{4}-\\d{2}-\\d{2}(\\.\\w+)?$\"\n elif self.when.startswith('W'):\n self.interval=60 *60 *24 *7\n if len(self.when)!=2:\n raise ValueError(\"You must specify a day for weekly rollover from 0 to 6 (0 is Monday): %s\"%self.when)\n if self.when[1]<'0'or self.when[1]>'6':\n raise ValueError(\"Invalid day specified for weekly rollover: %s\"%self.when)\n self.dayOfWeek=int(self.when[1])\n self.suffix=\"%Y-%m-%d\"\n self.extMatch=r\"^\\d{4}-\\d{2}-\\d{2}(\\.\\w+)?$\"\n else :\n raise ValueError(\"Invalid rollover interval specified: %s\"%self.when)\n \n self.extMatch=re.compile(self.extMatch,re.ASCII)\n self.interval=self.interval *interval\n if os.path.exists(filename):\n t=os.stat(filename)[ST_MTIME]\n else :\n t=int(time.time())\n self.rolloverAt=self.computeRollover(t)\n \n def computeRollover(self,currentTime):\n ''\n\n \n result=currentTime+self.interval\n \n \n \n \n \n \n \n if self.when =='MIDNIGHT'or self.when.startswith('W'):\n \n if self.utc:\n t=time.gmtime(currentTime)\n else :\n t=time.localtime(currentTime)\n currentHour=t[3]\n currentMinute=t[4]\n currentSecond=t[5]\n \n r=_MIDNIGHT -((currentHour *60+currentMinute)*60+\n currentSecond)\n result=currentTime+r\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n if self.when.startswith('W'):\n day=t[6]\n if day !=self.dayOfWeek:\n if day =self.rolloverAt:\n return 1\n return 0\n \n def getFilesToDelete(self):\n ''\n\n\n\n \n dirName,baseName=os.path.split(self.baseFilename)\n fileNames=os.listdir(dirName)\n result=[]\n prefix=baseName+\".\"\n plen=len(prefix)\n for fileName in fileNames:\n if fileName[:plen]==prefix:\n suffix=fileName[plen:]\n if self.extMatch.match(suffix):\n result.append(os.path.join(dirName,fileName))\n result.sort()\n if len(result)0:\n for s in self.getFilesToDelete():\n os.remove(s)\n if not self.delay:\n self.stream=self._open()\n newRolloverAt=self.computeRollover(currentTime)\n while newRolloverAt <=currentTime:\n newRolloverAt=newRolloverAt+self.interval\n \n if (self.when =='MIDNIGHT'or self.when.startswith('W'))and not self.utc:\n dstAtRollover=time.localtime(newRolloverAt)[-1]\n if dstNow !=dstAtRollover:\n if not dstNow:\n addend=-3600\n else :\n addend=3600\n newRolloverAt +=addend\n self.rolloverAt=newRolloverAt\n \nclass WatchedFileHandler(logging.FileHandler):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n def __init__(self,filename,mode='a',encoding=None ,delay=False ):\n logging.FileHandler.__init__(self,filename,mode,encoding,delay)\n self.dev,self.ino=-1,-1\n self._statstream()\n \n def _statstream(self):\n if self.stream:\n sres=os.fstat(self.stream.fileno())\n self.dev,self.ino=sres[ST_DEV],sres[ST_INO]\n \n def emit(self,record):\n ''\n\n\n\n\n\n \n \n \n \n \n try :\n \n sres=os.stat(self.baseFilename)\n except OSError as err:\n if err.errno ==errno.ENOENT:\n sres=None\n else :\n raise\n \n if not sres or sres[ST_DEV]!=self.dev or sres[ST_INO]!=self.ino:\n if self.stream is not None :\n \n self.stream.flush()\n self.stream.close()\n \n self.stream=self._open()\n self._statstream()\n logging.FileHandler.emit(self,record)\n \n \nclass SocketHandler(logging.Handler):\n ''\n\n\n\n\n\n\n\n\n\n \n \n def __init__(self,host,port):\n ''\n\n\n\n\n\n \n logging.Handler.__init__(self)\n self.host=host\n self.port=port\n self.sock=None\n self.closeOnError=False\n self.retryTime=None\n \n \n \n self.retryStart=1.0\n self.retryMax=30.0\n self.retryFactor=2.0\n \n def makeSocket(self,timeout=1):\n ''\n\n\n \n s=socket.socket(socket.AF_INET,socket.SOCK_STREAM)\n if hasattr(s,'settimeout'):\n 
s.settimeout(timeout)\n try :\n s.connect((self.host,self.port))\n return s\n except socket.error:\n s.close()\n raise\n \n def createSocket(self):\n ''\n\n\n\n \n now=time.time()\n \n \n \n if self.retryTime is None :\n attempt=True\n else :\n attempt=(now >=self.retryTime)\n if attempt:\n try :\n self.sock=self.makeSocket()\n self.retryTime=None\n except socket.error:\n \n if self.retryTime is None :\n self.retryPeriod=self.retryStart\n else :\n self.retryPeriod=self.retryPeriod *self.retryFactor\n if self.retryPeriod >self.retryMax:\n self.retryPeriod=self.retryMax\n self.retryTime=now+self.retryPeriod\n \n def send(self,s):\n ''\n\n\n\n\n \n if self.sock is None :\n self.createSocket()\n \n \n \n if self.sock:\n try :\n if hasattr(self.sock,\"sendall\"):\n self.sock.sendall(s)\n else :\n sentsofar=0\n left=len(s)\n while left >0:\n sent=self.sock.send(s[sentsofar:])\n sentsofar=sentsofar+sent\n left=left -sent\n except socket.error:\n self.sock.close()\n self.sock=None\n \n def makePickle(self,record):\n ''\n\n\n \n ei=record.exc_info\n if ei:\n \n dummy=self.format(record)\n \n \n \n d=dict(record.__dict__)\n d['msg']=record.getMessage()\n d['args']=None\n d['exc_info']=None\n s=pickle.dumps(d,1)\n slen=struct.pack(\">L\",len(s))\n return slen+s\n \n def handleError(self,record):\n ''\n\n\n\n\n\n \n if self.closeOnError and self.sock:\n self.sock.close()\n self.sock=None\n else :\n logging.Handler.handleError(self,record)\n \n def emit(self,record):\n ''\n\n\n\n\n\n\n \n try :\n s=self.makePickle(record)\n self.send(s)\n except (KeyboardInterrupt,SystemExit):\n raise\n except :\n self.handleError(record)\n \n def close(self):\n ''\n\n \n self.acquire()\n try :\n if self.sock:\n self.sock.close()\n self.sock=None\n logging.Handler.close(self)\n finally :\n self.release()\n \nclass DatagramHandler(SocketHandler):\n ''\n\n\n\n\n\n\n\n\n \n def __init__(self,host,port):\n ''\n\n \n SocketHandler.__init__(self,host,port)\n self.closeOnError=False\n \n def makeSocket(self):\n ''\n\n\n \n s=socket.socket(socket.AF_INET,socket.SOCK_DGRAM)\n return s\n \n def send(self,s):\n ''\n\n\n\n\n\n \n if self.sock is None :\n self.createSocket()\n self.sock.sendto(s,(self.host,self.port))\n \nclass SysLogHandler(logging.Handler):\n ''\n\n\n\n\n\n \n \n \n \n \n \n \n \n \n \n \n \n LOG_EMERG=0\n LOG_ALERT=1\n LOG_CRIT=2\n LOG_ERR=3\n LOG_WARNING=4\n LOG_NOTICE=5\n LOG_INFO=6\n LOG_DEBUG=7\n \n \n LOG_KERN=0\n LOG_USER=1\n LOG_MAIL=2\n LOG_DAEMON=3\n LOG_AUTH=4\n LOG_SYSLOG=5\n LOG_LPR=6\n LOG_NEWS=7\n LOG_UUCP=8\n LOG_CRON=9\n LOG_AUTHPRIV=10\n LOG_FTP=11\n \n \n LOG_LOCAL0=16\n LOG_LOCAL1=17\n LOG_LOCAL2=18\n LOG_LOCAL3=19\n LOG_LOCAL4=20\n LOG_LOCAL5=21\n LOG_LOCAL6=22\n LOG_LOCAL7=23\n \n priority_names={\n \"alert\":LOG_ALERT,\n \"crit\":LOG_CRIT,\n \"critical\":LOG_CRIT,\n \"debug\":LOG_DEBUG,\n \"emerg\":LOG_EMERG,\n \"err\":LOG_ERR,\n \"error\":LOG_ERR,\n \"info\":LOG_INFO,\n \"notice\":LOG_NOTICE,\n \"panic\":LOG_EMERG,\n \"warn\":LOG_WARNING,\n \"warning\":LOG_WARNING,\n }\n \n facility_names={\n \"auth\":LOG_AUTH,\n \"authpriv\":LOG_AUTHPRIV,\n \"cron\":LOG_CRON,\n \"daemon\":LOG_DAEMON,\n \"ftp\":LOG_FTP,\n \"kern\":LOG_KERN,\n \"lpr\":LOG_LPR,\n \"mail\":LOG_MAIL,\n \"news\":LOG_NEWS,\n \"security\":LOG_AUTH,\n \"syslog\":LOG_SYSLOG,\n \"user\":LOG_USER,\n \"uucp\":LOG_UUCP,\n \"local0\":LOG_LOCAL0,\n \"local1\":LOG_LOCAL1,\n \"local2\":LOG_LOCAL2,\n \"local3\":LOG_LOCAL3,\n \"local4\":LOG_LOCAL4,\n \"local5\":LOG_LOCAL5,\n \"local6\":LOG_LOCAL6,\n \"local7\":LOG_LOCAL7,\n }\n \n \n \n \n 
\n priority_map={\n \"DEBUG\":\"debug\",\n \"INFO\":\"info\",\n \"WARNING\":\"warning\",\n \"ERROR\":\"error\",\n \"CRITICAL\":\"critical\"\n }\n \n def __init__(self,address=('localhost',SYSLOG_UDP_PORT),\n facility=LOG_USER,socktype=None ):\n ''\n\n\n\n\n\n \n logging.Handler.__init__(self)\n \n self.address=address\n self.facility=facility\n self.socktype=socktype\n \n if isinstance(address,str):\n self.unixsocket=True\n self._connect_unixsocket(address)\n else :\n self.unixsocket=False\n if socktype is None :\n socktype=socket.SOCK_DGRAM\n self.socket=socket.socket(socket.AF_INET,socktype)\n if socktype ==socket.SOCK_STREAM:\n self.socket.connect(address)\n self.socktype=socktype\n self.formatter=None\n \n def _connect_unixsocket(self,address):\n use_socktype=self.socktype\n if use_socktype is None :\n use_socktype=socket.SOCK_DGRAM\n self.socket=socket.socket(socket.AF_UNIX,use_socktype)\n try :\n self.socket.connect(address)\n \n self.socktype=use_socktype\n except socket.error:\n self.socket.close()\n if self.socktype is not None :\n \n raise\n use_socktype=socket.SOCK_STREAM\n self.socket=socket.socket(socket.AF_UNIX,use_socktype)\n try :\n self.socket.connect(address)\n \n self.socktype=use_socktype\n except socket.error:\n self.socket.close()\n raise\n \n def encodePriority(self,facility,priority):\n ''\n\n\n\n\n \n if isinstance(facility,str):\n facility=self.facility_names[facility]\n if isinstance(priority,str):\n priority=self.priority_names[priority]\n return (facility <<3)|priority\n \n def close(self):\n ''\n\n \n self.acquire()\n try :\n self.socket.close()\n logging.Handler.close(self)\n finally :\n self.release()\n \n def mapPriority(self,levelName):\n ''\n\n\n\n\n\n \n return self.priority_map.get(levelName,\"warning\")\n \n ident=''\n append_nul=True\n \n def emit(self,record):\n ''\n\n\n\n\n \n msg=self.format(record)\n if self.ident:\n msg=self.ident+msg\n if self.append_nul:\n msg +='\\000'\n ''\n\n\n \n prio='<%d>'%self.encodePriority(self.facility,\n self.mapPriority(record.levelname))\n prio=prio.encode('utf-8')\n \n msg=msg.encode('utf-8')\n msg=prio+msg\n try :\n if self.unixsocket:\n try :\n self.socket.send(msg)\n except socket.error:\n self.socket.close()\n self._connect_unixsocket(self.address)\n self.socket.send(msg)\n elif self.socktype ==socket.SOCK_DGRAM:\n self.socket.sendto(msg,self.address)\n else :\n self.socket.sendall(msg)\n except (KeyboardInterrupt,SystemExit):\n raise\n except :\n self.handleError(record)\n \nclass SMTPHandler(logging.Handler):\n ''\n\n \n def __init__(self,mailhost,fromaddr,toaddrs,subject,\n credentials=None ,secure=None ,timeout=5.0):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n logging.Handler.__init__(self)\n if isinstance(mailhost,tuple):\n self.mailhost,self.mailport=mailhost\n else :\n self.mailhost,self.mailport=mailhost,None\n if isinstance(credentials,tuple):\n self.username,self.password=credentials\n else :\n self.username=None\n self.fromaddr=fromaddr\n if isinstance(toaddrs,str):\n toaddrs=[toaddrs]\n self.toaddrs=toaddrs\n self.subject=subject\n self.secure=secure\n self.timeout=timeout\n \n def getSubject(self,record):\n ''\n\n\n\n\n \n return self.subject\n \n def emit(self,record):\n ''\n\n\n\n \n try :\n import smtplib\n from email.utils import formatdate\n port=self.mailport\n if not port:\n port=smtplib.SMTP_PORT\n smtp=smtplib.SMTP(self.mailhost,port,timeout=self.timeout)\n msg=self.format(record)\n msg=\"From: %s\\r\\nTo: %s\\r\\nSubject: %s\\r\\nDate: %s\\r\\n\\r\\n%s\"%(\n self.fromaddr,\n 
\",\".join(self.toaddrs),\n self.getSubject(record),\n formatdate(),msg)\n if self.username:\n if self.secure is not None :\n smtp.ehlo()\n smtp.starttls(*self.secure)\n smtp.ehlo()\n smtp.login(self.username,self.password)\n smtp.sendmail(self.fromaddr,self.toaddrs,msg)\n smtp.quit()\n except (KeyboardInterrupt,SystemExit):\n raise\n except :\n self.handleError(record)\n \nclass NTEventLogHandler(logging.Handler):\n ''\n\n\n\n\n\n\n\n \n def __init__(self,appname,dllname=None ,logtype=\"Application\"):\n logging.Handler.__init__(self)\n try :\n import win32evtlogutil,win32evtlog\n self.appname=appname\n self._welu=win32evtlogutil\n if not dllname:\n dllname=os.path.split(self._welu.__file__)\n dllname=os.path.split(dllname[0])\n dllname=os.path.join(dllname[0],r'win32service.pyd')\n self.dllname=dllname\n self.logtype=logtype\n self._welu.AddSourceToRegistry(appname,dllname,logtype)\n self.deftype=win32evtlog.EVENTLOG_ERROR_TYPE\n self.typemap={\n logging.DEBUG:win32evtlog.EVENTLOG_INFORMATION_TYPE,\n logging.INFO:win32evtlog.EVENTLOG_INFORMATION_TYPE,\n logging.WARNING:win32evtlog.EVENTLOG_WARNING_TYPE,\n logging.ERROR:win32evtlog.EVENTLOG_ERROR_TYPE,\n logging.CRITICAL:win32evtlog.EVENTLOG_ERROR_TYPE,\n }\n except ImportError:\n print(\"The Python Win32 extensions for NT (service, event \" \"logging) appear not to be available.\")\n self._welu=None\n \n def getMessageID(self,record):\n ''\n\n\n\n\n\n \n return 1\n \n def getEventCategory(self,record):\n ''\n\n\n\n\n \n return 0\n \n def getEventType(self,record):\n ''\n\n\n\n\n\n\n\n\n \n return self.typemap.get(record.levelno,self.deftype)\n \n def emit(self,record):\n ''\n\n\n\n\n \n if self._welu:\n try :\n id=self.getMessageID(record)\n cat=self.getEventCategory(record)\n type=self.getEventType(record)\n msg=self.format(record)\n self._welu.ReportEvent(self.appname,id,cat,type,[msg])\n except (KeyboardInterrupt,SystemExit):\n raise\n except :\n self.handleError(record)\n \n def close(self):\n ''\n\n\n\n\n\n\n\n \n \n logging.Handler.close(self)\n \nclass HTTPHandler(logging.Handler):\n ''\n\n\n \n def __init__(self,host,url,method=\"GET\",secure=False ,credentials=None ):\n ''\n\n\n \n logging.Handler.__init__(self)\n method=method.upper()\n if method not in [\"GET\",\"POST\"]:\n raise ValueError(\"method must be GET or POST\")\n self.host=host\n self.url=url\n self.method=method\n self.secure=secure\n self.credentials=credentials\n \n def mapLogRecord(self,record):\n ''\n\n\n\n \n return record.__dict__\n \n def emit(self,record):\n ''\n\n\n\n \n try :\n import http.client,urllib.parse\n host=self.host\n if self.secure:\n h=http.client.HTTPSConnection(host)\n else :\n h=http.client.HTTPConnection(host)\n url=self.url\n data=urllib.parse.urlencode(self.mapLogRecord(record))\n if self.method ==\"GET\":\n if (url.find('?')>=0):\n sep='&'\n else :\n sep='?'\n url=url+\"%c%s\"%(sep,data)\n h.putrequest(self.method,url)\n \n \n i=host.find(\":\")\n if i >=0:\n host=host[:i]\n h.putheader(\"Host\",host)\n if self.method ==\"POST\":\n h.putheader(\"Content-type\",\n \"application/x-www-form-urlencoded\")\n h.putheader(\"Content-length\",str(len(data)))\n if self.credentials:\n import base64\n s=('u%s:%s'%self.credentials).encode('utf-8')\n s='Basic '+base64.b64encode(s).strip()\n h.putheader('Authorization',s)\n h.endheaders()\n if self.method ==\"POST\":\n h.send(data.encode('utf-8'))\n h.getresponse()\n except (KeyboardInterrupt,SystemExit):\n raise\n except :\n self.handleError(record)\n \nclass BufferingHandler(logging.Handler):\n 
''\n\n\n\n \n def __init__(self,capacity):\n ''\n\n \n logging.Handler.__init__(self)\n self.capacity=capacity\n self.buffer=[]\n \n def shouldFlush(self,record):\n ''\n\n\n\n\n \n return (len(self.buffer)>=self.capacity)\n \n def emit(self,record):\n ''\n\n\n\n\n \n self.buffer.append(record)\n if self.shouldFlush(record):\n self.flush()\n \n def flush(self):\n ''\n\n\n\n \n self.acquire()\n try :\n self.buffer=[]\n finally :\n self.release()\n \n def close(self):\n ''\n\n\n\n \n self.flush()\n logging.Handler.close(self)\n \nclass MemoryHandler(BufferingHandler):\n ''\n\n\n\n \n def __init__(self,capacity,flushLevel=logging.ERROR,target=None ):\n ''\n\n\n\n\n\n \n BufferingHandler.__init__(self,capacity)\n self.flushLevel=flushLevel\n self.target=target\n \n def shouldFlush(self,record):\n ''\n\n \n return (len(self.buffer)>=self.capacity)or (record.levelno >=self.flushLevel)\n \n def setTarget(self,target):\n ''\n\n \n self.target=target\n \n def flush(self):\n ''\n\n\n\n\n\n \n self.acquire()\n try :\n if self.target:\n for record in self.buffer:\n self.target.handle(record)\n self.buffer=[]\n finally :\n self.release()\n \n def close(self):\n ''\n\n \n self.flush()\n self.acquire()\n try :\n self.target=None\n BufferingHandler.close(self)\n finally :\n self.release()\n \n \nclass QueueHandler(logging.Handler):\n ''\n\n\n\n\n\n\n\n \n \n def __init__(self,queue):\n ''\n\n \n logging.Handler.__init__(self)\n self.queue=queue\n \n def enqueue(self,record):\n ''\n\n\n\n\n\n \n self.queue.put_nowait(record)\n \n def prepare(self,record):\n ''\n\n\n\n\n\n\n\n\n\n\n \n \n \n \n \n \n \n self.format(record)\n record.msg=record.message\n record.args=None\n record.exc_info=None\n return record\n \n def emit(self,record):\n ''\n\n\n\n \n try :\n self.enqueue(self.prepare(record))\n except (KeyboardInterrupt,SystemExit):\n raise\n except :\n self.handleError(record)\n \nif threading:\n class QueueListener(object):\n ''\n\n\n\n \n _sentinel=None\n \n def __init__(self,queue,*handlers):\n ''\n\n\n \n self.queue=queue\n self.handlers=handlers\n self._stop=threading.Event()\n self._thread=None\n \n def dequeue(self,block):\n ''\n\n\n\n\n \n return self.queue.get(block)\n \n def start(self):\n ''\n\n\n\n\n \n self._thread=t=threading.Thread(target=self._monitor)\n t.setDaemon(True )\n t.start()\n \n def prepare(self,record):\n ''\n\n\n\n\n\n \n return record\n \n def handle(self,record):\n ''\n\n\n\n\n \n record=self.prepare(record)\n for handler in self.handlers:\n handler.handle(record)\n \n def _monitor(self):\n ''\n\n\n\n\n\n \n q=self.queue\n has_task_done=hasattr(q,'task_done')\n while not self._stop.isSet():\n try :\n record=self.dequeue(True )\n if record is self._sentinel:\n break\n self.handle(record)\n if has_task_done:\n q.task_done()\n except queue.Empty:\n pass\n \n while True :\n try :\n record=self.dequeue(False )\n if record is self._sentinel:\n break\n self.handle(record)\n if has_task_done:\n q.task_done()\n except queue.Empty:\n break\n \n def enqueue_sentinel(self):\n ''\n\n\n\n\n\n \n self.queue.put_nowait(self._sentinel)\n \n def stop(self):\n ''\n\n\n\n\n\n \n self._stop.set()\n self.enqueue_sentinel()\n self._thread.join()\n self._thread=None\n"], "_sre": [".py", "\n''\n\n\n\n\n\n\n\nMAXREPEAT=2147483648\n\n\nimport operator,sys\nfrom sre_constants import ATCODES,OPCODES,CHCODES\nfrom sre_constants import SRE_INFO_PREFIX,SRE_INFO_LITERAL\nfrom sre_constants import SRE_FLAG_UNICODE,SRE_FLAG_LOCALE\n\n\nimport 
sys\n\n\n\nMAGIC=20031017\n\n\n\n\n\n\n\n\n\n\n\n\n\nCODESIZE=4\n\ncopyright=\"_sre.py 2.4c Copyright 2005 by Nik Haldimann\"\n\n\ndef getcodesize():\n return CODESIZE\n \ndef compile(pattern,flags,code,groups=0,groupindex={},indexgroup=[None ]):\n ''\n \n return SRE_Pattern(pattern,flags,code,groups,groupindex,indexgroup)\n \ndef getlower(char_ord,flags):\n if (char_ord <128)or (flags&SRE_FLAG_UNICODE) or (flags&SRE_FLAG_LOCALE and char_ord <256):\n \n return ord(chr(char_ord).lower())\n else :\n return char_ord\n \n \nclass SRE_Pattern:\n\n def __init__(self,pattern,flags,code,groups=0,groupindex={},indexgroup=[None ]):\n self.pattern=pattern\n self.flags=flags\n self.groups=groups\n self.groupindex=groupindex\n self._indexgroup=indexgroup\n self._code=code\n \n def match(self,string,pos=0,endpos=sys.maxsize):\n ''\n\n \n state=_State(string,pos,endpos,self.flags)\n if state.match(self._code):\n return SRE_Match(self,state)\n return None\n \n def search(self,string,pos=0,endpos=sys.maxsize):\n ''\n\n\n \n state=_State(string,pos,endpos,self.flags)\n if state.search(self._code):\n return SRE_Match(self,state)\n else :\n return None\n \n def findall(self,string,pos=0,endpos=sys.maxsize):\n ''\n matchlist=[]\n state=_State(string,pos,endpos,self.flags)\n while state.start <=state.end:\n state.reset()\n state.string_position=state.start\n if not state.search(self._code):\n break\n match=SRE_Match(self,state)\n if self.groups ==0 or self.groups ==1:\n item=match.group(self.groups)\n else :\n item=match.groups(\"\")\n matchlist.append(item)\n if state.string_position ==state.start:\n state.start +=1\n else :\n state.start=state.string_position\n return matchlist\n \n def _subx(self,template,string,count=0,subn=False ):\n filter=template\n if not callable(template)and\"\\\\\"in template:\n \n \n \n \n import re as sre\n filter=sre._subx(self,template)\n state=_State(string,0,sys.maxsize,self.flags)\n sublist=[]\n \n n=last_pos=0\n while not count or n 0):\n \n if callable(filter):\n sublist.append(filter(SRE_Match(self,state)))\n else :\n sublist.append(filter)\n last_pos=state.string_position\n n +=1\n if state.string_position ==state.start:\n state.start +=1\n else :\n state.start=state.string_position\n \n if last_pos =0 and group <=self.re.groups:\n return group\n else :\n if group in self.re.groupindex:\n return self.re.groupindex[group]\n raise IndexError(\"no such group\")\n \n def _get_slice(self,group,default):\n group_indices=self.regs[group]\n if group_indices[0]>=0:\n return self.string[group_indices[0]:group_indices[1]]\n else :\n return default\n \n def start(self,group=0):\n ''\n\n \n return self.regs[self._get_index(group)][0]\n \n def end(self,group=0):\n ''\n\n \n return self.regs[self._get_index(group)][1]\n \n def span(self,group=0):\n ''\n return self.start(group),self.end(group)\n \n def expand(self,template):\n ''\n \n import sre\n return sre._expand(self.re,self,template)\n \n def groups(self,default=None ):\n ''\n\n \n groups=[]\n for indices in self.regs[1:]:\n if indices[0]>=0:\n groups.append(self.string[indices[0]:indices[1]])\n else :\n groups.append(default)\n return tuple(groups)\n \n def groupdict(self,default=None ):\n ''\n\n \n groupdict={}\n for key,value in self.re.groupindex.items():\n groupdict[key]=self._get_slice(value,default)\n return groupdict\n \n def group(self,*args):\n ''\n \n if len(args)==0:\n args=(0,)\n grouplist=[]\n for group in args:\n grouplist.append(self._get_slice(self._get_index(group),None ))\n if len(grouplist)==1:\n return 
grouplist[0]\n else :\n return tuple(grouplist)\n \n def __copy__():\n raise TypeError(\"cannot copy this pattern object\")\n \n def __deepcopy__():\n raise TypeError(\"cannot copy this pattern object\")\n \n \nclass _State:\n\n def __init__(self,string,start,end,flags):\n self.string=string\n if start <0:\n start=0\n if end >len(string):\n end=len(string)\n self.start=start\n self.string_position=self.start\n self.end=end\n self.pos=start\n self.flags=flags\n self.reset()\n \n def reset(self):\n self.marks=[]\n self.lastindex=-1\n self.marks_stack=[]\n self.context_stack=[]\n self.repeat=None\n \n def match(self,pattern_codes):\n \n \n \n \n \n \n \n \n \n dispatcher=_OpcodeDispatcher()\n self.context_stack.append(_MatchContext(self,pattern_codes))\n has_matched=None\n while len(self.context_stack)>0:\n context=self.context_stack[-1]\n has_matched=dispatcher.match(context)\n if has_matched is not None :\n self.context_stack.pop()\n return has_matched\n \n def search(self,pattern_codes):\n flags=0\n if pattern_codes[0]==OPCODES[\"info\"]:\n \n \n if pattern_codes[2]&SRE_INFO_PREFIX and pattern_codes[5]>1:\n return self.fast_search(pattern_codes)\n flags=pattern_codes[2]\n pattern_codes=pattern_codes[pattern_codes[1]+1:]\n \n string_position=self.start\n if pattern_codes[0]==OPCODES[\"literal\"]:\n \n \n character=pattern_codes[1]\n while True :\n while string_position =self.end:\n return False\n self.start=string_position\n string_position +=1\n self.string_position=string_position\n if flags&SRE_INFO_LITERAL:\n return True\n if self.match(pattern_codes[2:]):\n return True\n return False\n \n \n while string_position <=self.end:\n self.reset()\n self.start=self.string_position=string_position\n if self.match(pattern_codes):\n return True\n string_position +=1\n return False\n \n def fast_search(self,pattern_codes):\n ''\n \n \n \n flags=pattern_codes[2]\n prefix_len=pattern_codes[5]\n prefix_skip=pattern_codes[6]\n prefix=pattern_codes[7:7+prefix_len]\n overlap=pattern_codes[7+prefix_len -1:pattern_codes[1]+1]\n pattern_codes=pattern_codes[pattern_codes[1]+1:]\n i=0\n string_position=self.string_position\n while string_position =len(self.marks):\n self.marks.extend([None ]*(mark_nr -len(self.marks)+1))\n self.marks[mark_nr]=position\n \n def get_marks(self,group_index):\n marks_index=2 *group_index\n if len(self.marks)>marks_index+1:\n return self.marks[marks_index],self.marks[marks_index+1]\n else :\n return None ,None\n \n def marks_push(self):\n self.marks_stack.append((self.marks[:],self.lastindex))\n \n def marks_pop(self):\n self.marks,self.lastindex=self.marks_stack.pop()\n \n def marks_pop_keep(self):\n self.marks,self.lastindex=self.marks_stack[-1]\n \n def marks_pop_discard(self):\n self.marks_stack.pop()\n \n def lower(self,char_ord):\n return getlower(char_ord,self.flags)\n \n \nclass _MatchContext:\n\n def __init__(self,state,pattern_codes):\n self.state=state\n self.pattern_codes=pattern_codes\n self.string_position=state.string_position\n self.code_position=0\n self.has_matched=None\n \n def push_new_context(self,pattern_offset):\n ''\n\n \n child_context=_MatchContext(self.state,\n self.pattern_codes[self.code_position+pattern_offset:])\n \n \n \n \n self.state.context_stack.append(child_context)\n return child_context\n \n def peek_char(self,peek=0):\n return self.state.string[self.string_position+peek]\n \n def skip_char(self,skip_count):\n self.string_position +=skip_count\n \n def remaining_chars(self):\n return self.state.end -self.string_position\n \n def 
peek_code(self,peek=0):\n return self.pattern_codes[self.code_position+peek]\n \n def skip_code(self,skip_count):\n self.code_position +=skip_count\n \n def remaining_codes(self):\n return len(self.pattern_codes)-self.code_position\n \n def at_beginning(self):\n return self.string_position ==0\n \n def at_end(self):\n return self.string_position ==self.state.end\n \n def at_linebreak(self):\n return not self.at_end()and _is_linebreak(self.peek_char())\n \n def at_boundary(self,word_checker):\n if self.at_beginning()and self.at_end():\n return False\n that=not self.at_beginning()and word_checker(self.peek_char(-1))\n this=not self.at_end()and word_checker(self.peek_char())\n return this !=that\n \n \nclass _RepeatContext(_MatchContext):\n\n def __init__(self,context):\n _MatchContext.__init__(self,context.state,\n context.pattern_codes[context.code_position:])\n self.count=-1\n \n self.previous=context.state.repeat\n self.last_position=None\n \n \nclass _Dispatcher:\n\n DISPATCH_TABLE=None\n \n def dispatch(self,code,context):\n method=self.DISPATCH_TABLE.get(code,self.__class__.unknown)\n return method(self,context)\n \n def unknown(self,code,ctx):\n raise NotImplementedError()\n \n def build_dispatch_table(cls,code_dict,method_prefix):\n if cls.DISPATCH_TABLE is not None :\n return\n table={}\n for key,value in code_dict.items():\n if hasattr(cls,\"%s%s\"%(method_prefix,key)):\n table[value]=getattr(cls,\"%s%s\"%(method_prefix,key))\n cls.DISPATCH_TABLE=table\n \n build_dispatch_table=classmethod(build_dispatch_table)\n \n \nclass _OpcodeDispatcher(_Dispatcher):\n\n def __init__(self):\n self.executing_contexts={}\n self.at_dispatcher=_AtcodeDispatcher()\n self.ch_dispatcher=_ChcodeDispatcher()\n self.set_dispatcher=_CharsetDispatcher()\n \n def match(self,context):\n ''\n\n \n while context.remaining_codes()>0 and context.has_matched is None :\n opcode=context.peek_code()\n if not self.dispatch(opcode,context):\n return None\n if context.has_matched is None :\n context.has_matched=False\n return context.has_matched\n \n def dispatch(self,opcode,context):\n ''\n \n \n if id(context)in self.executing_contexts:\n generator=self.executing_contexts[id(context)]\n del self.executing_contexts[id(context)]\n has_finished=next(generator)\n else :\n method=self.DISPATCH_TABLE.get(opcode,_OpcodeDispatcher.unknown)\n has_finished=method(self,context)\n if hasattr(has_finished,\"__next__\"):\n generator=has_finished\n has_finished=next(generator)\n if not has_finished:\n self.executing_contexts[id(context)]=generator\n return has_finished\n \n def op_success(self,ctx):\n \n \n ctx.state.string_position=ctx.string_position\n ctx.has_matched=True\n return True\n \n def op_failure(self,ctx):\n \n \n ctx.has_matched=False\n return True\n \n def general_op_literal(self,ctx,compare,decorate=lambda x:x):\n \n if ctx.at_end()or not compare(decorate(ord(ctx.peek_char())),\n decorate(ctx.peek_code(1))):\n ctx.has_matched=False\n ctx.skip_code(2)\n ctx.skip_char(1)\n \n def op_literal(self,ctx):\n \n \n \n self.general_op_literal(ctx,operator.eq)\n return True\n \n def op_not_literal(self,ctx):\n \n \n \n self.general_op_literal(ctx,operator.ne)\n return True\n \n def op_literal_ignore(self,ctx):\n \n \n \n self.general_op_literal(ctx,operator.eq,ctx.state.lower)\n return True\n \n def op_not_literal_ignore(self,ctx):\n \n \n \n self.general_op_literal(ctx,operator.ne,ctx.state.lower)\n return True\n \n def op_at(self,ctx):\n \n \n \n if not self.at_dispatcher.dispatch(ctx.peek_code(1),ctx):\n 
ctx.has_matched=False\n \n return True\n ctx.skip_code(2)\n return True\n \n def op_category(self,ctx):\n \n \n \n if ctx.at_end()or not self.ch_dispatcher.dispatch(ctx.peek_code(1),ctx):\n ctx.has_matched=False\n \n return True\n ctx.skip_code(2)\n ctx.skip_char(1)\n return True\n \n def op_any(self,ctx):\n \n \n \n if ctx.at_end()or ctx.at_linebreak():\n ctx.has_matched=False\n \n return True\n ctx.skip_code(1)\n ctx.skip_char(1)\n return True\n \n def op_any_all(self,ctx):\n \n \n \n if ctx.at_end():\n ctx.has_matched=False\n \n return True\n ctx.skip_code(1)\n ctx.skip_char(1)\n return True\n \n def general_op_in(self,ctx,decorate=lambda x:x):\n \n \n if ctx.at_end():\n ctx.has_matched=False\n \n return\n skip=ctx.peek_code(1)\n ctx.skip_code(2)\n \n \n if not self.check_charset(ctx,decorate(ord(ctx.peek_char()))):\n \n ctx.has_matched=False\n return\n ctx.skip_code(skip -1)\n ctx.skip_char(1)\n \n \n def op_in(self,ctx):\n \n \n \n self.general_op_in(ctx)\n return True\n \n def op_in_ignore(self,ctx):\n \n \n \n self.general_op_in(ctx,ctx.state.lower)\n return True\n \n def op_jump(self,ctx):\n \n \n \n ctx.skip_code(ctx.peek_code(1)+1)\n return True\n \n \n \n op_info=op_jump\n \n def op_mark(self,ctx):\n \n \n \n ctx.state.set_mark(ctx.peek_code(1),ctx.string_position)\n ctx.skip_code(2)\n return True\n \n def op_branch(self,ctx):\n \n \n \n ctx.state.marks_push()\n ctx.skip_code(1)\n current_branch_length=ctx.peek_code(0)\n while current_branch_length:\n \n \n if not (ctx.peek_code(1)==OPCODES[\"literal\"]and (ctx.at_end()or ctx.peek_code(2)!=ord(ctx.peek_char()))):\n ctx.state.string_position=ctx.string_position\n child_context=ctx.push_new_context(1)\n \n yield False\n if child_context.has_matched:\n ctx.has_matched=True\n yield True\n ctx.state.marks_pop_keep()\n ctx.skip_code(current_branch_length)\n current_branch_length=ctx.peek_code(0)\n ctx.state.marks_pop_discard()\n ctx.has_matched=False\n \n yield True\n \n def op_repeat_one(self,ctx):\n \n \n \n \n mincount=ctx.peek_code(2)\n maxcount=ctx.peek_code(3)\n \n \n \n if ctx.remaining_chars()=mincount and (ctx.at_end()or ord(ctx.peek_char())!=char):\n ctx.skip_char(-1)\n count -=1\n if count =mincount:\n ctx.state.string_position=ctx.string_position\n child_context=ctx.push_new_context(ctx.peek_code(1)+1)\n yield False\n if child_context.has_matched:\n ctx.has_matched=True\n yield True\n ctx.skip_char(-1)\n count -=1\n ctx.state.marks_pop_keep()\n \n ctx.state.marks_pop_discard()\n ctx.has_matched=False\n \n yield True\n \n def op_min_repeat_one(self,ctx):\n \n \n mincount=ctx.peek_code(2)\n maxcount=ctx.peek_code(3)\n \n \n if ctx.remaining_chars()=maxcount and maxcount !=MAXREPEAT:\n ctx.has_matched=False\n \n yield True\n repeat.count=count\n child_context=repeat.push_new_context(4)\n yield False\n ctx.has_matched=child_context.has_matched\n if not ctx.has_matched:\n repeat.count=count -1\n ctx.state.string_position=ctx.string_position\n yield True\n \n def general_op_groupref(self,ctx,decorate=lambda x:x):\n group_start,group_end=ctx.state.get_marks(ctx.peek_code(1))\n if group_start is None or group_end is None or group_end =0:\n child_context=ctx.push_new_context(3)\n yield False\n if child_context.has_matched:\n ctx.has_matched=False\n yield True\n ctx.skip_code(ctx.peek_code(1)+1)\n yield True\n \n def unknown(self,ctx):\n \n raise RuntimeError(\"Internal re error. 
Unknown opcode: %s\"%ctx.peek_code())\n \n def check_charset(self,ctx,char):\n ''\n \n self.set_dispatcher.reset(char)\n save_position=ctx.code_position\n result=None\n while result is None :\n result=self.set_dispatcher.dispatch(ctx.peek_code(),ctx)\n ctx.code_position=save_position\n \n return result\n \n def count_repetitions(self,ctx,maxcount):\n ''\n\n \n count=0\n real_maxcount=ctx.state.end -ctx.string_position\n if maxcount >4) &(1 <<(char_code&15)):\n return self.ok\n ctx.skip_code(16)\n else :\n if char_code <256 and ctx.peek_code(char_code >>5) &(1 <<(char_code&31)):\n return self.ok\n ctx.skip_code(8)\n def set_range(self,ctx):\n \n if ctx.peek_code(1)<=self.char <=ctx.peek_code(2):\n return self.ok\n ctx.skip_code(3)\n def set_negate(self,ctx):\n self.ok=not self.ok\n ctx.skip_code(1)\n \n \n def set_bigcharset(self,ctx):\n raise NotImplementationError(\"_sre.py: set_bigcharset, array not implemented\")\n \n char_code=self.char\n count=ctx.peek_code(1)\n ctx.skip_code(2)\n if char_code <65536:\n block_index=char_code >>8\n \n a=array.array(\"B\")\n a.fromstring(array.array(CODESIZE ==2 and\"H\"or\"I\",\n [ctx.peek_code(block_index //CODESIZE)]).tostring())\n block=a[block_index %CODESIZE]\n ctx.skip_code(256 //CODESIZE)\n block_value=ctx.peek_code(block *(32 //CODESIZE)\n +((char_code&255)>>(CODESIZE ==2 and 4 or 5)))\n if block_value&(1 <<(char_code&((8 *CODESIZE)-1))):\n return self.ok\n else :\n ctx.skip_code(256 //CODESIZE)\n ctx.skip_code(count *(32 //CODESIZE))\n \n def unknown(self,ctx):\n return False\n \n_CharsetDispatcher.build_dispatch_table(OPCODES,\"set_\")\n\n\nclass _AtcodeDispatcher(_Dispatcher):\n\n def at_beginning(self,ctx):\n return ctx.at_beginning()\n at_beginning_string=at_beginning\n def at_beginning_line(self,ctx):\n return ctx.at_beginning()or _is_linebreak(ctx.peek_char(-1))\n def at_end(self,ctx):\n return (ctx.remaining_chars()==1 and ctx.at_linebreak())or ctx.at_end()\n def at_end_line(self,ctx):\n return ctx.at_linebreak()or ctx.at_end()\n def at_end_string(self,ctx):\n return ctx.at_end()\n def at_boundary(self,ctx):\n return ctx.at_boundary(_is_word)\n def at_non_boundary(self,ctx):\n return not ctx.at_boundary(_is_word)\n def at_loc_boundary(self,ctx):\n return ctx.at_boundary(_is_loc_word)\n def at_loc_non_boundary(self,ctx):\n return not ctx.at_boundary(_is_loc_word)\n def at_uni_boundary(self,ctx):\n return ctx.at_boundary(_is_uni_word)\n def at_uni_non_boundary(self,ctx):\n return not ctx.at_boundary(_is_uni_word)\n def unknown(self,ctx):\n return False\n \n_AtcodeDispatcher.build_dispatch_table(ATCODES,\"\")\n\n\nclass _ChcodeDispatcher(_Dispatcher):\n\n def category_digit(self,ctx):\n return _is_digit(ctx.peek_char())\n def category_not_digit(self,ctx):\n return not _is_digit(ctx.peek_char())\n def category_space(self,ctx):\n return _is_space(ctx.peek_char())\n def category_not_space(self,ctx):\n return not _is_space(ctx.peek_char())\n def category_word(self,ctx):\n return _is_word(ctx.peek_char())\n def category_not_word(self,ctx):\n return not _is_word(ctx.peek_char())\n def category_linebreak(self,ctx):\n return _is_linebreak(ctx.peek_char())\n def category_not_linebreak(self,ctx):\n return not _is_linebreak(ctx.peek_char())\n def category_loc_word(self,ctx):\n return _is_loc_word(ctx.peek_char())\n def category_loc_not_word(self,ctx):\n return not _is_loc_word(ctx.peek_char())\n def category_uni_digit(self,ctx):\n return ctx.peek_char().isdigit()\n def category_uni_not_digit(self,ctx):\n return not ctx.peek_char().isdigit()\n def 
category_uni_space(self,ctx):\n return ctx.peek_char().isspace()\n def category_uni_not_space(self,ctx):\n return not ctx.peek_char().isspace()\n def category_uni_word(self,ctx):\n return _is_uni_word(ctx.peek_char())\n def category_uni_not_word(self,ctx):\n return not _is_uni_word(ctx.peek_char())\n def category_uni_linebreak(self,ctx):\n return ord(ctx.peek_char())in _uni_linebreaks\n def category_uni_not_linebreak(self,ctx):\n return ord(ctx.peek_char())not in _uni_linebreaks\n def unknown(self,ctx):\n return False\n \n_ChcodeDispatcher.build_dispatch_table(CHCODES,\"\")\n\n\n_ascii_char_info=[0,0,0,0,0,0,0,0,0,2,6,2,\n2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,\n0,0,0,0,0,0,0,0,0,0,0,0,0,25,25,25,25,25,25,25,25,\n25,25,0,0,0,0,0,0,0,24,24,24,24,24,24,24,24,24,24,\n24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,0,0,\n0,0,16,0,24,24,24,24,24,24,24,24,24,24,24,24,24,24,\n24,24,24,24,24,24,24,24,24,24,24,24,0,0,0,0,0]\n\ndef _is_digit(char):\n code=ord(char)\n return code <128 and _ascii_char_info[code]&1\n \ndef _is_space(char):\n code=ord(char)\n return code <128 and _ascii_char_info[code]&2\n \ndef _is_word(char):\n\n code=ord(char)\n return code <128 and _ascii_char_info[code]&16\n \ndef _is_loc_word(char):\n return (not (ord(char)&~255)and char.isalnum())or char =='_'\n \ndef _is_uni_word(char):\n\n\n return chr(ord(char)).isalnum()or char =='_'\n \ndef _is_linebreak(char):\n return char ==\"\\n\"\n \n \n_uni_linebreaks=[10,13,28,29,30,133,8232,8233]\n\ndef _log(message):\n if 0:\n print(message)\n"], "fnmatch": [".py", "''\n\n\n\n\n\n\n\n\n\n\nimport os\nimport posixpath\nimport re\nimport functools\n\n__all__=[\"filter\",\"fnmatch\",\"fnmatchcase\",\"translate\"]\n\ndef fnmatch(name,pat):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n \n name=os.path.normcase(name)\n pat=os.path.normcase(pat)\n return fnmatchcase(name,pat)\n \n@functools.lru_cache(maxsize=256,typed=True )\ndef _compile_pattern(pat):\n if isinstance(pat,bytes):\n pat_str=str(pat,'ISO-8859-1')\n res_str=translate(pat_str)\n res=bytes(res_str,'ISO-8859-1')\n else :\n res=translate(pat)\n return re.compile(res).match\n \ndef filter(names,pat):\n ''\n result=[]\n pat=os.path.normcase(pat)\n match=_compile_pattern(pat)\n if os.path is posixpath:\n \n for name in names:\n if match(name):\n result.append(name)\n else :\n for name in names:\n if match(os.path.normcase(name)):\n result.append(name)\n return result\n \ndef fnmatchcase(name,pat):\n ''\n\n\n\n \n match=_compile_pattern(pat)\n return match(name)is not None\n \n \ndef translate(pat):\n ''\n\n\n \n \n i,n=0,len(pat)\n res=''\n while i =n:\n res=res+'\\\\['\n else :\n stuff=pat[i:j].replace('\\\\','\\\\\\\\')\n i=j+1\n if stuff[0]=='!':\n stuff='^'+stuff[1:]\n elif stuff[0]=='^':\n stuff='\\\\'+stuff\n res='%s[%s]'%(res,stuff)\n else :\n res=res+re.escape(c)\n return res+'\\Z(?ms)'\n"], "multiprocessing": [".py", "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n__version__='0.70a1'\n\n__all__=[\n'Process','current_process','active_children','freeze_support',\n'Manager','Pipe','cpu_count','log_to_stderr','get_logger',\n'allow_connection_pickling','BufferTooShort','TimeoutError',\n'Lock','RLock','Semaphore','BoundedSemaphore','Condition',\n'Event','Barrier','Queue','SimpleQueue','JoinableQueue','Pool',\n'Value','Array','RawValue','RawArray','SUBDEBUG','SUBWARNING',\n]\n\n__author__='R. 
Oudkerk (r.m.oudkerk@gmail.com)'\n\n\n\n\n\nimport os\nimport sys\n\nfrom multiprocessing.process import Process,current_process,active_children\nfrom multiprocessing.util import SUBDEBUG,SUBWARNING\n\n\n\n\n\nclass ProcessError(Exception):\n pass\n \nclass BufferTooShort(ProcessError):\n pass\n \nclass TimeoutError(ProcessError):\n pass\n \nclass AuthenticationError(ProcessError):\n pass\n \nimport _multiprocessing\n\n\n\n\n\ndef Manager():\n ''\n\n\n\n\n \n from multiprocessing.managers import SyncManager\n m=SyncManager()\n m.start()\n return m\n \n \n \n \n \n \n \n \n \ndef cpu_count():\n ''\n\n \n if sys.platform =='win32':\n try :\n num=int(os.environ['NUMBER_OF_PROCESSORS'])\n except (ValueError,KeyError):\n num=0\n elif'bsd'in sys.platform or sys.platform =='darwin':\n comm='/sbin/sysctl -n hw.ncpu'\n if sys.platform =='darwin':\n comm='/usr'+comm\n try :\n with os.popen(comm)as p:\n num=int(p.read())\n except ValueError:\n num=0\n else :\n try :\n num=os.sysconf('SC_NPROCESSORS_ONLN')\n except (ValueError,OSError,AttributeError):\n num=0\n \n if num >=1:\n return num\n else :\n raise NotImplementedError('cannot determine number of cpus')\n \ndef freeze_support():\n ''\n\n\n \n if sys.platform =='win32'and getattr(sys,'frozen',False ):\n from multiprocessing.forking import freeze_support\n freeze_support()\n \ndef get_logger():\n ''\n\n \n from multiprocessing.util import get_logger\n return get_logger()\n \ndef log_to_stderr(level=None ):\n ''\n\n \n from multiprocessing.util import log_to_stderr\n return log_to_stderr(level)\n \n \n \n \n \n \n \n \n \n \n \n \n \n \ndef Lock():\n ''\n\n \n from multiprocessing.synchronize import Lock\n return Lock()\n \ndef RLock():\n ''\n\n \n from multiprocessing.synchronize import RLock\n return RLock()\n \ndef Condition(lock=None ):\n ''\n\n \n from multiprocessing.synchronize import Condition\n return Condition(lock)\n \ndef Semaphore(value=1):\n ''\n\n \n from multiprocessing.synchronize import Semaphore\n return Semaphore(value)\n \ndef BoundedSemaphore(value=1):\n ''\n\n \n from multiprocessing.synchronize import BoundedSemaphore\n return BoundedSemaphore(value)\n \ndef Event():\n ''\n\n \n from multiprocessing.synchronize import Event\n return Event()\n \ndef Barrier(parties,action=None ,timeout=None ):\n ''\n\n \n from multiprocessing.synchronize import Barrier\n return Barrier(parties,action,timeout)\n \ndef Queue(maxsize=0):\n ''\n\n \n from multiprocessing.queues import Queue\n return Queue(maxsize)\n \ndef JoinableQueue(maxsize=0):\n ''\n\n \n from multiprocessing.queues import JoinableQueue\n return JoinableQueue(maxsize)\n \ndef SimpleQueue():\n ''\n\n \n from multiprocessing.queues import SimpleQueue\n return SimpleQueue()\n \ndef Pool(processes=None ,initializer=None ,initargs=(),maxtasksperchild=None ):\n ''\n\n \n from multiprocessing.pool import Pool\n return Pool(processes,initializer,initargs,maxtasksperchild)\n \ndef RawValue(typecode_or_type,*args):\n ''\n\n \n from multiprocessing.sharedctypes import RawValue\n return RawValue(typecode_or_type,*args)\n \ndef RawArray(typecode_or_type,size_or_initializer):\n ''\n\n \n from multiprocessing.sharedctypes import RawArray\n return RawArray(typecode_or_type,size_or_initializer)\n \ndef Value(typecode_or_type,*args,lock=True ):\n ''\n\n \n from multiprocessing.sharedctypes import Value\n return Value(typecode_or_type,*args,lock=lock)\n \ndef Array(typecode_or_type,size_or_initializer,*,lock=True ):\n ''\n\n \n from multiprocessing.sharedctypes import Array\n return 
Array(typecode_or_type,size_or_initializer,lock=lock)\n \n \n \n \n \nif sys.platform =='win32':\n\n def set_executable(executable):\n ''\n\n\n\n \n from multiprocessing.forking import set_executable\n set_executable(executable)\n \n __all__ +=['set_executable']\n", 1], "multiprocessing.process": [".py", "\n\n\n\n\n\n\n\n\n__all__=['Process','current_process','active_children']\n\n\n\n\n\nimport os\nimport sys\nimport signal\nimport itertools\nfrom _weakrefset import WeakSet\n\n\nfrom _multiprocessing import Process\n\n\n\n\ntry :\n ORIGINAL_DIR=os.path.abspath(os.getcwd())\nexcept OSError:\n ORIGINAL_DIR=None\n \n \n \n \n \ndef current_process():\n ''\n\n \n return _current_process\n \ndef active_children():\n ''\n\n \n _cleanup()\n return list(_current_process._children)\n \n \n \n \n \ndef _cleanup():\n\n for p in list(_current_process._children):\n if p._popen.poll()is not None :\n _current_process._children.discard(p)\n \n \n \n \n \n \n \n \n \n \n \n \nclass AuthenticationString(bytes):\n def __reduce__(self):\n from .forking import Popen\n if not Popen.thread_is_spawning():\n raise TypeError(\n 'Pickling an AuthenticationString object is '\n 'disallowed for security reasons'\n )\n return AuthenticationString,(bytes(self),)\n \n \n \n \n \nclass _MainProcess(Process):\n\n def __init__(self):\n self._identity=()\n self._daemonic=False\n self._name='MainProcess'\n self._parent_pid=None\n self._popen=None\n self._counter=itertools.count(1)\n self._children=set()\n self._authkey=AuthenticationString(os.urandom(32))\n self._tempdir=None\n \n_current_process=_MainProcess()\ndel _MainProcess\n\n\n\n\n\n_exitcode_to_name={}\n\nfor name,signum in list(signal.__dict__.items()):\n if name[:3]=='SIG'and'_'not in name:\n _exitcode_to_name[-signum]=name\n \n \n_dangling=WeakSet()\n"], "abc": [".py", "\n\n\n\"\"\"Abstract Base Classes (ABCs) according to PEP 3119.\"\"\"\n\nfrom _weakrefset import WeakSet\n\ndef abstractmethod(funcobj):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n funcobj.__isabstractmethod__=True\n return funcobj\n \n \nclass abstractclassmethod(classmethod):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n __isabstractmethod__=True\n \n def __init__(self,callable):\n callable.__isabstractmethod__=True\n super().__init__(callable)\n \n \nclass abstractstaticmethod(staticmethod):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n __isabstractmethod__=True\n \n def __init__(self,callable):\n callable.__isabstractmethod__=True\n super().__init__(callable)\n \n \nclass abstractproperty(property):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n __isabstractmethod__=True\n \n \nclass ABCMeta(type):\n\n ''\n\n\n\n\n\n\n\n\n\n\n\n \n \n \n \n \n _abc_invalidation_counter=0\n \n def __new__(mcls,name,bases,namespace):\n cls=super().__new__(mcls,name,bases,namespace)\n \n abstracts={name\n for name,value in namespace.items()\n if getattr(value,\"__isabstractmethod__\",False )}\n for base in bases:\n for name in getattr(base,\"__abstractmethods__\",set()):\n value=getattr(cls,name,None )\n if getattr(value,\"__isabstractmethod__\",False ):\n abstracts.add(name)\n cls.__abstractmethods__=frozenset(abstracts)\n \n cls._abc_registry=WeakSet()\n cls._abc_cache=WeakSet()\n cls._abc_negative_cache=WeakSet()\n cls._abc_negative_cache_version=ABCMeta._abc_invalidation_counter\n return cls\n \n def register(cls,subclass):\n ''\n\n\n \n if not isinstance(subclass,type):\n raise TypeError(\"Can only register classes\")\n if issubclass(subclass,cls):\n return subclass\n \n \n if issubclass(cls,subclass):\n \n raise 
RuntimeError(\"Refusing to create an inheritance cycle\")\n cls._abc_registry.add(subclass)\n ABCMeta._abc_invalidation_counter +=1\n return subclass\n \n def _dump_registry(cls,file=None ):\n ''\n print(\"Class: %s.%s\"%(cls.__module__,cls.__name__),file=file)\n print(\"Inv.counter: %s\"%ABCMeta._abc_invalidation_counter,file=file)\n for name in sorted(cls.__dict__.keys()):\n if name.startswith(\"_abc_\"):\n value=getattr(cls,name)\n print(\"%s: %r\"%(name,value),file=file)\n \n def __instancecheck__(cls,instance):\n ''\n \n subclass=instance.__class__\n if subclass in cls._abc_cache:\n return True\n subtype=type(instance)\n if subtype is subclass:\n if (cls._abc_negative_cache_version ==\n ABCMeta._abc_invalidation_counter and\n subclass in cls._abc_negative_cache):\n return False\n \n return cls.__subclasscheck__(subclass)\n return any(cls.__subclasscheck__(c)for c in {subclass,subtype})\n \n def __subclasscheck__(cls,subclass):\n ''\n \n if subclass in cls._abc_cache:\n return True\n \n if cls._abc_negative_cache_version '%(self.__class__.__name__,self.path_entry)\n \n def find_module(self,fullname,path=None ):\n path=path or self.path_entry\n \n for _ext in ['js','pyj','py']:\n _filepath=os.path.join(self.path_entry,'%s.%s'%(fullname,_ext))\n if _filepath in VFS:\n print(\"module found at %s:%s\"%(_filepath,fullname))\n return VFSModuleLoader(_filepath,fullname)\n \n print('module %s not found'%fullname)\n raise ImportError()\n return None\n \nclass VFSModuleLoader:\n ''\n \n def __init__(self,filepath,name):\n self._filepath=filepath\n self._name=name\n \n def get_source(self):\n if self._filepath in VFS:\n return JSObject(readFromVFS(self._filepath))\n \n raise ImportError('could not find source for %s'%fullname)\n \n def is_package(self):\n return'.'in self._name\n \n def load_module(self):\n if self._name in sys.modules:\n \n mod=sys.modules[self._name]\n return mod\n \n _src=self.get_source()\n if self._filepath.endswith('.js'):\n mod=JSObject(import_js_module(_src,self._filepath,self._name))\n elif self._filepath.endswith('.py'):\n mod=JSObject(import_py_module(_src,self._filepath,self._name))\n elif self._filepath.endswith('.pyj'):\n mod=JSObject(import_pyj_module(_src,self._filepath,self._name))\n else :\n raise ImportError('Invalid Module: %s'%self._filepath)\n \n \n mod.__file__=self._filepath\n mod.__name__=self._name\n mod.__path__=os.path.abspath(self._filepath)\n mod.__loader__=self\n mod.__package__='.'.join(self._name.split('.')[:-1])\n \n if self.is_package():\n print('adding path for package')\n \n \n mod.__path__=[self.path_entry]\n else :\n print('imported as regular module')\n \n print('creating a new module object for \"%s\"'%self._name)\n sys.modules.setdefault(self._name,mod)\n JSObject(__BRYTHON__.imported)[self._name]=mod\n \n return mod\n \nJSObject(__BRYTHON__.path_hooks.insert(0,VFSModuleFinder))\n"], "encodings.cp1255": [".py", "''\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_table)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False ):\n return codecs.charmap_encode(input,self.errors,encoding_table)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False ):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass 
StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='cp1255',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n \n \ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' '\n'!'\n'\"'\n'#'\n'$'\n'%'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\u20ac'\n'\\ufffe'\n'\\u201a'\n'\\u0192'\n'\\u201e'\n'\\u2026'\n'\\u2020'\n'\\u2021'\n'\\u02c6'\n'\\u2030'\n'\\ufffe'\n'\\u2039'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\u2018'\n'\\u2019'\n'\\u201c'\n'\\u201d'\n'\\u2022'\n'\\u2013'\n'\\u2014'\n'\\u02dc'\n'\\u2122'\n'\\ufffe'\n'\\u203a'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\xa0'\n'\\xa1'\n'\\xa2'\n'\\xa3'\n'\\u20aa'\n'\\xa5'\n'\\xa6'\n'\\xa7'\n'\\xa8'\n'\\xa9'\n'\\xd7'\n'\\xab'\n'\\xac'\n'\\xad'\n'\\xae'\n'\\xaf'\n'\\xb0'\n'\\xb1'\n'\\xb2'\n'\\xb3'\n'\\xb4'\n'\\xb5'\n'\\xb6'\n'\\xb7'\n'\\xb8'\n'\\xb9'\n'\\xf7'\n'\\xbb'\n'\\xbc'\n'\\xbd'\n'\\xbe'\n'\\xbf'\n'\\u05b0'\n'\\u05b1'\n'\\u05b2'\n'\\u05b3'\n'\\u05b4'\n'\\u05b5'\n'\\u05b6'\n'\\u05b7'\n'\\u05b8'\n'\\u05b9'\n'\\ufffe'\n'\\u05bb'\n'\\u05bc'\n'\\u05bd'\n'\\u05be'\n'\\u05bf'\n'\\u05c0'\n'\\u05c1'\n'\\u05c2'\n'\\u05c3'\n'\\u05f0'\n'\\u05f1'\n'\\u05f2'\n'\\u05f3'\n'\\u05f4'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\u05d0'\n'\\u05d1'\n'\\u05d2'\n'\\u05d3'\n'\\u05d4'\n'\\u05d5'\n'\\u05d6'\n'\\u05d7'\n'\\u05d8'\n'\\u05d9'\n'\\u05da'\n'\\u05db'\n'\\u05dc'\n'\\u05dd'\n'\\u05de'\n'\\u05df'\n'\\u05e0'\n'\\u05e1'\n'\\u05e2'\n'\\u05e3'\n'\\u05e4'\n'\\u05e5'\n'\\u05e6'\n'\\u05e7'\n'\\u05e8'\n'\\u05e9'\n'\\u05ea'\n'\\ufffe'\n'\\ufffe'\n'\\u200e'\n'\\u200f'\n'\\ufffe'\n)\n\n\nencoding_table=codecs.charmap_build(decoding_table)\n"], "_struct": [".py", "\n\n\n\n\n\n\n\n\n\n\n\"\"\"Functions to convert between Python values and C structs.\nPython strings are used to hold the data representing the C struct\nand also as format strings to describe the layout of data in the C struct.\n\nThe optional first format char indicates byte order, size and alignment:\n @: native order, size & alignment (default)\n =: native order, std. size & alignment\n <: little-endian, std. size & alignment\n >: big-endian, std. 
size & alignment\n !: same as >\n\nThe remaining chars indicate types of args and must match exactly;\nthese can be preceded by a decimal repeat count:\n x: pad byte (no data);\n c:char;\n b:signed byte;\n B:unsigned byte;\n h:short;\n H:unsigned short;\n i:int;\n I:unsigned int;\n l:long;\n L:unsigned long;\n f:float;\n d:double.\nSpecial cases (preceding decimal count indicates length):\n s:string (array of char); p: pascal string (with count byte).\nSpecial case (only available in native format):\n P:an integer type that is wide enough to hold a pointer.\nSpecial case (not in native mode unless 'long long' in platform C):\n q:long long;\n Q:unsigned long long\nWhitespace between formats is ignored.\n\nThe variable struct.error is an exception raised on errors.\"\"\"\n\nimport math,sys\n\n\nclass StructError(Exception):\n pass\nerror=StructError\ndef unpack_int(data,index,size,le):\n bytes=[b for b in data[index:index+size]]\n if le =='little':\n bytes.reverse()\n number=0\n for b in bytes:\n number=number <<8 |b\n return int(number)\n \ndef unpack_signed_int(data,index,size,le):\n number=unpack_int(data,index,size,le)\n max=2 **(size *8)\n if number >2 **(size *8 -1)-1:\n number=int(-1 *(max -number))\n return number\n \nINFINITY=1e200 *1e200\nNAN=INFINITY /INFINITY\n\ndef unpack_char(data,index,size,le):\n return data[index:index+size]\n \ndef pack_int(number,size,le):\n x=number\n res=[]\n for i in range(size):\n res.append(x&0xff)\n x >>=8\n if le =='big':\n res.reverse()\n return bytes(res)\n \ndef pack_signed_int(number,size,le):\n if not isinstance(number,int):\n raise StructError(\"argument for i,I,l,L,q,Q,h,H must be integer\")\n if number >2 **(8 *size -1)-1 or number <-1 *2 **(8 *size -1):\n raise OverflowError(\"Number:%i too large to convert\"%number)\n return pack_int(number,size,le)\n \ndef pack_unsigned_int(number,size,le):\n if not isinstance(number,int):\n raise StructError(\"argument for i,I,l,L,q,Q,h,H must be integer\")\n if number <0:\n raise TypeError(\"can't convert negative long to unsigned\")\n if number >2 **(8 *size)-1:\n raise OverflowError(\"Number:%i too large to convert\"%number)\n return pack_int(number,size,le)\n \ndef pack_char(char,size,le):\n return bytes(char)\n \ndef isinf(x):\n return x !=0.0 and x /2 ==x\ndef isnan(v):\n return v !=v *1.0 or (v ==1.0 and v ==2.0)\n \ndef pack_float(x,size,le):\n unsigned=float_pack(x,size)\n result=[]\n for i in range(size):\n result.append((unsigned >>(i *8))&0xFF)\n if le ==\"big\":\n result.reverse()\n return bytes(result)\n \ndef unpack_float(data,index,size,le):\n binary=[data[i]for i in range(index,index+size)]\n if le ==\"big\":\n binary.reverse()\n unsigned=0\n for i in range(size):\n unsigned |=binary[i]<<(i *8)\n return float_unpack(unsigned,size,le)\n \ndef round_to_nearest(x):\n ''\n\n\n\n\n\n\n\n\n \n int_part=int(x)\n frac_part=x -int_part\n if frac_part >0.5 or frac_part ==0.5 and int_part&1 ==1:\n int_part +=1\n return int_part\n \ndef float_unpack(Q,size,le):\n ''\n \n \n if size ==8:\n MIN_EXP=-1021\n MAX_EXP=1024\n MANT_DIG=53\n BITS=64\n elif size ==4:\n MIN_EXP=-125\n MAX_EXP=128\n MANT_DIG=24\n BITS=32\n else :\n raise ValueError(\"invalid size value\")\n \n if Q >>BITS:\n raise ValueError(\"input out of range\")\n \n \n sign=Q >>BITS -1\n exp=(Q&((1 <>MANT_DIG -1\n mant=Q&((1 <0:\n \n mant=round_to_nearest(m *(1 <=0:\n mant=round_to_nearest(m *(1 <=MAX_EXP -MIN_EXP+2:\n raise OverflowError(\"float too large to pack in this format\")\n \n \n assert 0 <=mant <1 
<':(default,'big'),\n'!':(default,'big'),\n'=':(default,sys.byteorder),\n'@':(default,sys.byteorder)\n}\n\ndef getmode(fmt):\n try :\n formatdef,endianness=formatmode[fmt[0]]\n alignment=fmt[0]not in formatmode or fmt[0]=='@'\n index=1\n except (IndexError,KeyError):\n formatdef,endianness=formatmode['@']\n alignment=True\n index=0\n return formatdef,endianness,index,alignment\n \ndef getNum(fmt,i):\n num=None\n cur=fmt[i]\n while ('0'<=cur)and (cur <='9'):\n if num ==None :\n num=int(cur)\n else :\n num=10 *num+int(cur)\n i +=1\n cur=fmt[i]\n return num,i\n \ndef calcsize(fmt):\n ''\n\n \n \n formatdef,endianness,i,alignment=getmode(fmt)\n num=0\n result=0\n while i 0:\n result +=[bytes([len(args[0])])+args[0][:num -1]+b'\\0'*padding]\n else :\n if num <255:\n result +=[bytes([num -1])+args[0][:num -1]]\n else :\n result +=[bytes([255])+args[0][:num -1]]\n args.pop(0)\n else :\n raise StructError(\"arg for string format not a string\")\n \n else :\n if len(args)=num:\n n=num -1\n result.append(data[j+1:j+n+1])\n j +=num\n else :\n \n if j >0 and alignment:\n padding=format['size']-j %format['size']\n j +=padding\n for n in range(num):\n result +=[format['unpack'](data,j,format['size'],endianness)]\n j +=format['size']\n \n return tuple(result)\n \ndef pack_into(fmt,buf,offset,*args):\n data=pack(fmt,*args)\n buf[offset:offset+len(data)]=data\n \ndef unpack_from(fmt,buf,offset=0):\n size=calcsize(fmt)\n data=buf[offset:offset+size]\n if len(data)!=size:\n raise error(\"unpack_from requires a buffer of at least %d bytes\"\n %(size,))\n return unpack(fmt,data)\n \ndef _clearcache():\n ''\n \n \nclass Struct:\n\n def __init__(self,fmt):\n self.format=fmt\n \n def pack(self,*args):\n return pack(self.format,*args)\n \n def pack_into(self,*args):\n return pack_into(self.format,*args)\n \n def unpack(self,*args):\n return unpack(self.format,*args)\n \n def unpack_from(self,*args):\n return unpack_from(self.format,*args)\n \nif __name__ =='__main__':\n t=pack('Bf',1,2)\n print(t,len(t))\n print(unpack('Bf',t))\n print(calcsize('Bf'))\n \n"], "optparse": [".py", "''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n__version__=\"1.5.3\"\n\n__all__=['Option',\n'make_option',\n'SUPPRESS_HELP',\n'SUPPRESS_USAGE',\n'Values',\n'OptionContainer',\n'OptionGroup',\n'OptionParser',\n'HelpFormatter',\n'IndentedHelpFormatter',\n'TitledHelpFormatter',\n'OptParseError',\n'OptionError',\n'OptionConflictError',\n'OptionValueError',\n'BadOptionError']\n\n__copyright__=\"\"\"\nCopyright (c) 2001-2006 Gregory P. Ward. All rights reserved.\nCopyright (c) 2002-2006 Python Software Foundation. 
All rights reserved.\n\nRedistribution and use in source and binary forms, with or without\nmodification, are permitted provided that the following conditions are\nmet:\n\n * Redistributions of source code must retain the above copyright\n notice, this list of conditions and the following disclaimer.\n\n * Redistributions in binary form must reproduce the above copyright\n notice, this list of conditions and the following disclaimer in the\n documentation and/or other materials provided with the distribution.\n\n * Neither the name of the author nor the names of its\n contributors may be used to endorse or promote products derived from\n this software without specific prior written permission.\n\nTHIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS \"AS\nIS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED\nTO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A\nPARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR\nCONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,\nEXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,\nPROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR\nPROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF\nLIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING\nNEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS\nSOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\"\"\"\n\nimport sys,os\nimport textwrap\n\ndef _repr(self):\n return\"<%s at 0x%x: %s>\"%(self.__class__.__name__,id(self),self)\n \n \n \n \n \n \n \n \ntry :\n from gettext import gettext,ngettext\nexcept ImportError:\n def gettext(message):\n return message\n \n def ngettext(singular,plural,n):\n if n ==1:\n return singular\n return plural\n \n_=gettext\n\n\nclass OptParseError(Exception):\n def __init__(self,msg):\n self.msg=msg\n \n def __str__(self):\n return self.msg\n \n \nclass OptionError(OptParseError):\n ''\n\n\n \n \n def __init__(self,msg,option):\n self.msg=msg\n self.option_id=str(option)\n \n def __str__(self):\n if self.option_id:\n return\"option %s: %s\"%(self.option_id,self.msg)\n else :\n return self.msg\n \nclass OptionConflictError(OptionError):\n ''\n\n \n \nclass OptionValueError(OptParseError):\n ''\n\n\n \n \nclass BadOptionError(OptParseError):\n ''\n\n \n def __init__(self,opt_str):\n self.opt_str=opt_str\n \n def __str__(self):\n return _(\"no such option: %s\")%self.opt_str\n \nclass AmbiguousOptionError(BadOptionError):\n ''\n\n \n def __init__(self,opt_str,possibilities):\n BadOptionError.__init__(self,opt_str)\n self.possibilities=possibilities\n \n def __str__(self):\n return (_(\"ambiguous option: %s (%s?)\")\n %(self.opt_str,\", \".join(self.possibilities)))\n \n \nclass HelpFormatter:\n\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n NO_DEFAULT_VALUE=\"none\"\n \n def __init__(self,\n indent_increment,\n max_help_position,\n width,\n short_first):\n self.parser=None\n self.indent_increment=indent_increment\n self.help_position=self.max_help_position=max_help_position\n if width is None :\n try :\n width=int(os.environ['COLUMNS'])\n except (KeyError,ValueError):\n width=80\n width -=2\n self.width=width\n self.current_indent=0\n self.level=0\n self.help_width=None\n self.short_first=short_first\n self.default_tag=\"%default\"\n self.option_strings={}\n self._short_opt_fmt=\"%s %s\"\n self._long_opt_fmt=\"%s=%s\"\n \n def set_parser(self,parser):\n self.parser=parser\n \n def 
set_short_opt_delimiter(self,delim):\n if delim not in (\"\",\" \"):\n raise ValueError(\n \"invalid metavar delimiter for short options: %r\"%delim)\n self._short_opt_fmt=\"%s\"+delim+\"%s\"\n \n def set_long_opt_delimiter(self,delim):\n if delim not in (\"=\",\" \"):\n raise ValueError(\n \"invalid metavar delimiter for long options: %r\"%delim)\n self._long_opt_fmt=\"%s\"+delim+\"%s\"\n \n def indent(self):\n self.current_indent +=self.indent_increment\n self.level +=1\n \n def dedent(self):\n self.current_indent -=self.indent_increment\n assert self.current_indent >=0,\"Indent decreased below 0.\"\n self.level -=1\n \n def format_usage(self,usage):\n raise NotImplementedError(\"subclasses must implement\")\n \n def format_heading(self,heading):\n raise NotImplementedError(\"subclasses must implement\")\n \n def _format_text(self,text):\n ''\n\n\n \n text_width=self.width -self.current_indent\n indent=\" \"*self.current_indent\n return textwrap.fill(text,\n text_width,\n initial_indent=indent,\n subsequent_indent=indent)\n \n def format_description(self,description):\n if description:\n return self._format_text(description)+\"\\n\"\n else :\n return\"\"\n \n def format_epilog(self,epilog):\n if epilog:\n return\"\\n\"+self._format_text(epilog)+\"\\n\"\n else :\n return\"\"\n \n \n def expand_default(self,option):\n if self.parser is None or not self.default_tag:\n return option.help\n \n default_value=self.parser.defaults.get(option.dest)\n if default_value is NO_DEFAULT or default_value is None :\n default_value=self.NO_DEFAULT_VALUE\n \n return option.help.replace(self.default_tag,str(default_value))\n \n def format_option(self,option):\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n result=[]\n opts=self.option_strings[option]\n opt_width=self.help_position -self.current_indent -2\n if len(opts)>opt_width:\n opts=\"%*s%s\\n\"%(self.current_indent,\"\",opts)\n indent_first=self.help_position\n else :\n opts=\"%*s%-*s \"%(self.current_indent,\"\",opt_width,opts)\n indent_first=0\n result.append(opts)\n if option.help:\n help_text=self.expand_default(option)\n help_lines=textwrap.wrap(help_text,self.help_width)\n result.append(\"%*s%s\\n\"%(indent_first,\"\",help_lines[0]))\n result.extend([\"%*s%s\\n\"%(self.help_position,\"\",line)\n for line in help_lines[1:]])\n elif opts[-1]!=\"\\n\":\n result.append(\"\\n\")\n return\"\".join(result)\n \n def store_option_strings(self,parser):\n self.indent()\n max_len=0\n for opt in parser.option_list:\n strings=self.format_option_strings(opt)\n self.option_strings[opt]=strings\n max_len=max(max_len,len(strings)+self.current_indent)\n self.indent()\n for group in parser.option_groups:\n for opt in group.option_list:\n strings=self.format_option_strings(opt)\n self.option_strings[opt]=strings\n max_len=max(max_len,len(strings)+self.current_indent)\n self.dedent()\n self.dedent()\n self.help_position=min(max_len+2,self.max_help_position)\n self.help_width=self.width -self.help_position\n \n def format_option_strings(self,option):\n ''\n if option.takes_value():\n metavar=option.metavar or option.dest.upper()\n short_opts=[self._short_opt_fmt %(sopt,metavar)\n for sopt in option._short_opts]\n long_opts=[self._long_opt_fmt %(lopt,metavar)\n for lopt in option._long_opts]\n else :\n short_opts=option._short_opts\n long_opts=option._long_opts\n \n if self.short_first:\n opts=short_opts+long_opts\n else :\n opts=long_opts+short_opts\n \n return\", \".join(opts)\n \nclass IndentedHelpFormatter(HelpFormatter):\n ''\n \n \n def __init__(self,\n 
indent_increment=2,\n max_help_position=24,\n width=None ,\n short_first=1):\n HelpFormatter.__init__(\n self,indent_increment,max_help_position,width,short_first)\n \n def format_usage(self,usage):\n return _(\"Usage: %s\\n\")%usage\n \n def format_heading(self,heading):\n return\"%*s%s:\\n\"%(self.current_indent,\"\",heading)\n \n \nclass TitledHelpFormatter(HelpFormatter):\n ''\n \n \n def __init__(self,\n indent_increment=0,\n max_help_position=24,\n width=None ,\n short_first=0):\n HelpFormatter.__init__(\n self,indent_increment,max_help_position,width,short_first)\n \n def format_usage(self,usage):\n return\"%s %s\\n\"%(self.format_heading(_(\"Usage\")),usage)\n \n def format_heading(self,heading):\n return\"%s\\n%s\\n\"%(heading,\"=-\"[self.level]*len(heading))\n \n \ndef _parse_num(val,type):\n if val[:2].lower()==\"0x\":\n radix=16\n elif val[:2].lower()==\"0b\":\n radix=2\n val=val[2:]or\"0\"\n elif val[:1]==\"0\":\n radix=8\n else :\n radix=10\n \n return type(val,radix)\n \ndef _parse_int(val):\n return _parse_num(val,int)\n \n_builtin_cvt={\"int\":(_parse_int,_(\"integer\")),\n\"long\":(_parse_int,_(\"integer\")),\n\"float\":(float,_(\"floating-point\")),\n\"complex\":(complex,_(\"complex\"))}\n\ndef check_builtin(option,opt,value):\n (cvt,what)=_builtin_cvt[option.type]\n try :\n return cvt(value)\n except ValueError:\n raise OptionValueError(\n _(\"option %s: invalid %s value: %r\")%(opt,what,value))\n \ndef check_choice(option,opt,value):\n if value in option.choices:\n return value\n else :\n choices=\", \".join(map(repr,option.choices))\n raise OptionValueError(\n _(\"option %s: invalid choice: %r (choose from %s)\")\n %(opt,value,choices))\n \n \n \nNO_DEFAULT=(\"NO\",\"DEFAULT\")\n\n\nclass Option:\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n \n \n ATTRS=['action',\n 'type',\n 'dest',\n 'default',\n 'nargs',\n 'const',\n 'choices',\n 'callback',\n 'callback_args',\n 'callback_kwargs',\n 'help',\n 'metavar']\n \n \n \n ACTIONS=(\"store\",\n \"store_const\",\n \"store_true\",\n \"store_false\",\n \"append\",\n \"append_const\",\n \"count\",\n \"callback\",\n \"help\",\n \"version\")\n \n \n \n \n STORE_ACTIONS=(\"store\",\n \"store_const\",\n \"store_true\",\n \"store_false\",\n \"append\",\n \"append_const\",\n \"count\")\n \n \n \n TYPED_ACTIONS=(\"store\",\n \"append\",\n \"callback\")\n \n \n \n ALWAYS_TYPED_ACTIONS=(\"store\",\n \"append\")\n \n \n CONST_ACTIONS=(\"store_const\",\n \"append_const\")\n \n \n \n TYPES=(\"string\",\"int\",\"long\",\"float\",\"complex\",\"choice\")\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n TYPE_CHECKER={\"int\":check_builtin,\n \"long\":check_builtin,\n \"float\":check_builtin,\n \"complex\":check_builtin,\n \"choice\":check_choice,\n }\n \n \n \n \n \n \n \n \n \n \n CHECK_METHODS=None\n \n \n \n \n def __init__(self,*opts,**attrs):\n \n \n self._short_opts=[]\n self._long_opts=[]\n opts=self._check_opt_strings(opts)\n self._set_opt_strings(opts)\n \n \n self._set_attrs(attrs)\n \n \n \n \n \n \n for checker in self.CHECK_METHODS:\n checker(self)\n \n def _check_opt_strings(self,opts):\n \n \n \n opts=[opt for opt in opts if opt]\n if not opts:\n raise TypeError(\"at least one option string must be supplied\")\n return opts\n \n def _set_opt_strings(self,opts):\n for opt in opts:\n if len(opt)<2:\n raise OptionError(\n \"invalid option string %r: \"\n \"must be at least two characters long\"%opt,self)\n elif len(opt)==2:\n if not (opt[0]==\"-\"and opt[1]!=\"-\"):\n raise OptionError(\n \"invalid short option string %r: \"\n 
\"must be of the form -x, (x any non-dash char)\"%opt,\n self)\n self._short_opts.append(opt)\n else :\n if not (opt[0:2]==\"--\"and opt[2]!=\"-\"):\n raise OptionError(\n \"invalid long option string %r: \"\n \"must start with --, followed by non-dash\"%opt,\n self)\n self._long_opts.append(opt)\n \n def _set_attrs(self,attrs):\n for attr in self.ATTRS:\n if attr in attrs:\n setattr(self,attr,attrs[attr])\n del attrs[attr]\n else :\n if attr =='default':\n setattr(self,attr,NO_DEFAULT)\n else :\n setattr(self,attr,None )\n if attrs:\n attrs=sorted(attrs.keys())\n raise OptionError(\n \"invalid keyword arguments: %s\"%\", \".join(attrs),\n self)\n \n \n \n \n def _check_action(self):\n if self.action is None :\n self.action=\"store\"\n elif self.action not in self.ACTIONS:\n raise OptionError(\"invalid action: %r\"%self.action,self)\n \n def _check_type(self):\n if self.type is None :\n if self.action in self.ALWAYS_TYPED_ACTIONS:\n if self.choices is not None :\n \n self.type=\"choice\"\n else :\n \n self.type=\"string\"\n else :\n \n \n \n \n \n import builtins\n if (isinstance(self.type,type)or\n (hasattr(self.type,\"__name__\")and\n getattr(builtins,self.type.__name__,None )is self.type)):\n self.type=self.type.__name__\n \n if self.type ==\"str\":\n self.type=\"string\"\n \n if self.type not in self.TYPES:\n raise OptionError(\"invalid option type: %r\"%self.type,self)\n if self.action not in self.TYPED_ACTIONS:\n raise OptionError(\n \"must not supply a type for action %r\"%self.action,self)\n \n def _check_choice(self):\n if self.type ==\"choice\":\n if self.choices is None :\n raise OptionError(\n \"must supply a list of choices for type 'choice'\",self)\n elif not isinstance(self.choices,(tuple,list)):\n raise OptionError(\n \"choices must be a list of strings ('%s' supplied)\"\n %str(type(self.choices)).split(\"'\")[1],self)\n elif self.choices is not None :\n raise OptionError(\n \"must not supply choices for type %r\"%self.type,self)\n \n def _check_dest(self):\n \n \n takes_value=(self.action in self.STORE_ACTIONS or\n self.type is not None )\n if self.dest is None and takes_value:\n \n \n \n if self._long_opts:\n \n self.dest=self._long_opts[0][2:].replace('-','_')\n else :\n self.dest=self._short_opts[0][1]\n \n def _check_const(self):\n if self.action not in self.CONST_ACTIONS and self.const is not None :\n raise OptionError(\n \"'const' must not be supplied for action %r\"%self.action,\n self)\n \n def _check_nargs(self):\n if self.action in self.TYPED_ACTIONS:\n if self.nargs is None :\n self.nargs=1\n elif self.nargs is not None :\n raise OptionError(\n \"'nargs' must not be supplied for action %r\"%self.action,\n self)\n \n def _check_callback(self):\n if self.action ==\"callback\":\n if not callable(self.callback):\n raise OptionError(\n \"callback not callable: %r\"%self.callback,self)\n if (self.callback_args is not None and\n not isinstance(self.callback_args,tuple)):\n raise OptionError(\n \"callback_args, if supplied, must be a tuple: not %r\"\n %self.callback_args,self)\n if (self.callback_kwargs is not None and\n not isinstance(self.callback_kwargs,dict)):\n raise OptionError(\n \"callback_kwargs, if supplied, must be a dict: not %r\"\n %self.callback_kwargs,self)\n else :\n if self.callback is not None :\n raise OptionError(\n \"callback supplied (%r) for non-callback option\"\n %self.callback,self)\n if self.callback_args is not None :\n raise OptionError(\n \"callback_args supplied for non-callback option\",self)\n if self.callback_kwargs is not None :\n 
raise OptionError(\n \"callback_kwargs supplied for non-callback option\",self)\n \n \n CHECK_METHODS=[_check_action,\n _check_type,\n _check_choice,\n _check_dest,\n _check_const,\n _check_nargs,\n _check_callback]\n \n \n \n \n def __str__(self):\n return\"/\".join(self._short_opts+self._long_opts)\n \n __repr__=_repr\n \n def takes_value(self):\n return self.type is not None\n \n def get_opt_string(self):\n if self._long_opts:\n return self._long_opts[0]\n else :\n return self._short_opts[0]\n \n \n \n \n def check_value(self,opt,value):\n checker=self.TYPE_CHECKER.get(self.type)\n if checker is None :\n return value\n else :\n return checker(self,opt,value)\n \n def convert_value(self,opt,value):\n if value is not None :\n if self.nargs ==1:\n return self.check_value(opt,value)\n else :\n return tuple([self.check_value(opt,v)for v in value])\n \n def process(self,opt,value,values,parser):\n \n \n \n value=self.convert_value(opt,value)\n \n \n \n \n return self.take_action(\n self.action,self.dest,opt,value,values,parser)\n \n def take_action(self,action,dest,opt,value,values,parser):\n if action ==\"store\":\n setattr(values,dest,value)\n elif action ==\"store_const\":\n setattr(values,dest,self.const)\n elif action ==\"store_true\":\n setattr(values,dest,True )\n elif action ==\"store_false\":\n setattr(values,dest,False )\n elif action ==\"append\":\n values.ensure_value(dest,[]).append(value)\n elif action ==\"append_const\":\n values.ensure_value(dest,[]).append(self.const)\n elif action ==\"count\":\n setattr(values,dest,values.ensure_value(dest,0)+1)\n elif action ==\"callback\":\n args=self.callback_args or ()\n kwargs=self.callback_kwargs or {}\n self.callback(self,opt,value,parser,*args,**kwargs)\n elif action ==\"help\":\n parser.print_help()\n parser.exit()\n elif action ==\"version\":\n parser.print_version()\n parser.exit()\n else :\n raise ValueError(\"unknown action %r\"%self.action)\n \n return 1\n \n \n \n \nSUPPRESS_HELP=\"SUPPRESS\"+\"HELP\"\nSUPPRESS_USAGE=\"SUPPRESS\"+\"USAGE\"\n\nclass Values:\n\n def __init__(self,defaults=None ):\n if defaults:\n for (attr,val)in defaults.items():\n setattr(self,attr,val)\n \n def __str__(self):\n return str(self.__dict__)\n \n __repr__=_repr\n \n def __eq__(self,other):\n if isinstance(other,Values):\n return self.__dict__ ==other.__dict__\n elif isinstance(other,dict):\n return self.__dict__ ==other\n else :\n return NotImplemented\n \n def _update_careful(self,dict):\n ''\n\n\n\n\n \n for attr in dir(self):\n if attr in dict:\n dval=dict[attr]\n if dval is not None :\n setattr(self,attr,dval)\n \n def _update_loose(self,dict):\n ''\n\n\n\n \n self.__dict__.update(dict)\n \n def _update(self,dict,mode):\n if mode ==\"careful\":\n self._update_careful(dict)\n elif mode ==\"loose\":\n self._update_loose(dict)\n else :\n raise ValueError(\"invalid update mode: %r\"%mode)\n \n def read_module(self,modname,mode=\"careful\"):\n __import__(modname)\n mod=sys.modules[modname]\n self._update(vars(mod),mode)\n \n def read_file(self,filename,mode=\"careful\"):\n vars={}\n exec(open(filename).read(),vars)\n self._update(vars,mode)\n \n def ensure_value(self,attr,value):\n if not hasattr(self,attr)or getattr(self,attr)is None :\n setattr(self,attr,value)\n return getattr(self,attr)\n \n \nclass OptionContainer:\n\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n def __init__(self,option_class,conflict_handler,description):\n \n \n \n \n self._create_option_list()\n \n self.option_class=option_class\n 
self.set_conflict_handler(conflict_handler)\n self.set_description(description)\n \n def _create_option_mappings(self):\n \n \n \n self._short_opt={}\n self._long_opt={}\n self.defaults={}\n \n \n def _share_option_mappings(self,parser):\n \n \n self._short_opt=parser._short_opt\n self._long_opt=parser._long_opt\n self.defaults=parser.defaults\n \n def set_conflict_handler(self,handler):\n if handler not in (\"error\",\"resolve\"):\n raise ValueError(\"invalid conflict_resolution value %r\"%handler)\n self.conflict_handler=handler\n \n def set_description(self,description):\n self.description=description\n \n def get_description(self):\n return self.description\n \n \n def destroy(self):\n ''\n del self._short_opt\n del self._long_opt\n del self.defaults\n \n \n \n \n def _check_conflict(self,option):\n conflict_opts=[]\n for opt in option._short_opts:\n if opt in self._short_opt:\n conflict_opts.append((opt,self._short_opt[opt]))\n for opt in option._long_opts:\n if opt in self._long_opt:\n conflict_opts.append((opt,self._long_opt[opt]))\n \n if conflict_opts:\n handler=self.conflict_handler\n if handler ==\"error\":\n raise OptionConflictError(\n \"conflicting option string(s): %s\"\n %\", \".join([co[0]for co in conflict_opts]),\n option)\n elif handler ==\"resolve\":\n for (opt,c_option)in conflict_opts:\n if opt.startswith(\"--\"):\n c_option._long_opts.remove(opt)\n del self._long_opt[opt]\n else :\n c_option._short_opts.remove(opt)\n del self._short_opt[opt]\n if not (c_option._short_opts or c_option._long_opts):\n c_option.container.option_list.remove(c_option)\n \n def add_option(self,*args,**kwargs):\n ''\n\n \n if isinstance(args[0],str):\n option=self.option_class(*args,**kwargs)\n elif len(args)==1 and not kwargs:\n option=args[0]\n if not isinstance(option,Option):\n raise TypeError(\"not an Option instance: %r\"%option)\n else :\n raise TypeError(\"invalid arguments\")\n \n self._check_conflict(option)\n \n self.option_list.append(option)\n option.container=self\n for opt in option._short_opts:\n self._short_opt[opt]=option\n for opt in option._long_opts:\n self._long_opt[opt]=option\n \n if option.dest is not None :\n if option.default is not NO_DEFAULT:\n self.defaults[option.dest]=option.default\n elif option.dest not in self.defaults:\n self.defaults[option.dest]=None\n \n return option\n \n def add_options(self,option_list):\n for option in option_list:\n self.add_option(option)\n \n \n \n def get_option(self,opt_str):\n return (self._short_opt.get(opt_str)or\n self._long_opt.get(opt_str))\n \n def has_option(self,opt_str):\n return (opt_str in self._short_opt or\n opt_str in self._long_opt)\n \n def remove_option(self,opt_str):\n option=self._short_opt.get(opt_str)\n if option is None :\n option=self._long_opt.get(opt_str)\n if option is None :\n raise ValueError(\"no such option %r\"%opt_str)\n \n for opt in option._short_opts:\n del self._short_opt[opt]\n for opt in option._long_opts:\n del self._long_opt[opt]\n option.container.option_list.remove(option)\n \n \n \n \n def format_option_help(self,formatter):\n if not self.option_list:\n return\"\"\n result=[]\n for option in self.option_list:\n if not option.help is SUPPRESS_HELP:\n result.append(formatter.format_option(option))\n return\"\".join(result)\n \n def format_description(self,formatter):\n return formatter.format_description(self.get_description())\n \n def format_help(self,formatter):\n result=[]\n if self.description:\n result.append(self.format_description(formatter))\n if self.option_list:\n 
result.append(self.format_option_help(formatter))\n return\"\\n\".join(result)\n \n \nclass OptionGroup(OptionContainer):\n\n def __init__(self,parser,title,description=None ):\n self.parser=parser\n OptionContainer.__init__(\n self,parser.option_class,parser.conflict_handler,description)\n self.title=title\n \n def _create_option_list(self):\n self.option_list=[]\n self._share_option_mappings(self.parser)\n \n def set_title(self,title):\n self.title=title\n \n def destroy(self):\n ''\n OptionContainer.destroy(self)\n del self.option_list\n \n \n \n def format_help(self,formatter):\n result=formatter.format_heading(self.title)\n formatter.indent()\n result +=OptionContainer.format_help(self,formatter)\n formatter.dedent()\n return result\n \n \nclass OptionParser(OptionContainer):\n\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n standard_option_list=[]\n \n def __init__(self,\n usage=None ,\n option_list=None ,\n option_class=Option,\n version=None ,\n conflict_handler=\"error\",\n description=None ,\n formatter=None ,\n add_help_option=True ,\n prog=None ,\n epilog=None ):\n OptionContainer.__init__(\n self,option_class,conflict_handler,description)\n self.set_usage(usage)\n self.prog=prog\n self.version=version\n self.allow_interspersed_args=True\n self.process_default_values=True\n if formatter is None :\n formatter=IndentedHelpFormatter()\n self.formatter=formatter\n self.formatter.set_parser(self)\n self.epilog=epilog\n \n \n \n \n \n self._populate_option_list(option_list,\n add_help=add_help_option)\n \n self._init_parsing_state()\n \n \n def destroy(self):\n ''\n\n\n\n\n \n OptionContainer.destroy(self)\n for group in self.option_groups:\n group.destroy()\n del self.option_list\n del self.option_groups\n del self.formatter\n \n \n \n \n \n def _create_option_list(self):\n self.option_list=[]\n self.option_groups=[]\n self._create_option_mappings()\n \n def _add_help_option(self):\n self.add_option(\"-h\",\"--help\",\n action=\"help\",\n help=_(\"show this help message and exit\"))\n \n def _add_version_option(self):\n self.add_option(\"--version\",\n action=\"version\",\n help=_(\"show program's version number and exit\"))\n \n def _populate_option_list(self,option_list,add_help=True ):\n if self.standard_option_list:\n self.add_options(self.standard_option_list)\n if option_list:\n self.add_options(option_list)\n if self.version:\n self._add_version_option()\n if add_help:\n self._add_help_option()\n \n def _init_parsing_state(self):\n \n self.rargs=None\n self.largs=None\n self.values=None\n \n \n \n \n def set_usage(self,usage):\n if usage is None :\n self.usage=_(\"%prog [options]\")\n elif usage is SUPPRESS_USAGE:\n self.usage=None\n \n elif usage.lower().startswith(\"usage: \"):\n self.usage=usage[7:]\n else :\n self.usage=usage\n \n def enable_interspersed_args(self):\n ''\n\n\n\n \n self.allow_interspersed_args=True\n \n def disable_interspersed_args(self):\n ''\n\n\n\n \n self.allow_interspersed_args=False\n \n def set_process_default_values(self,process):\n self.process_default_values=process\n \n def set_default(self,dest,value):\n self.defaults[dest]=value\n \n def set_defaults(self,**kwargs):\n self.defaults.update(kwargs)\n \n def _get_all_options(self):\n options=self.option_list[:]\n for group in self.option_groups:\n options.extend(group.option_list)\n return options\n \n def get_default_values(self):\n if not self.process_default_values:\n \n return 
Values(self.defaults)\n \n defaults=self.defaults.copy()\n for option in self._get_all_options():\n default=defaults.get(option.dest)\n if isinstance(default,str):\n opt_str=option.get_opt_string()\n defaults[option.dest]=option.check_value(opt_str,default)\n \n return Values(defaults)\n \n \n \n \n def add_option_group(self,*args,**kwargs):\n \n if isinstance(args[0],str):\n group=OptionGroup(self,*args,**kwargs)\n elif len(args)==1 and not kwargs:\n group=args[0]\n if not isinstance(group,OptionGroup):\n raise TypeError(\"not an OptionGroup instance: %r\"%group)\n if group.parser is not self:\n raise ValueError(\"invalid OptionGroup (wrong parser)\")\n else :\n raise TypeError(\"invalid arguments\")\n \n self.option_groups.append(group)\n return group\n \n def get_option_group(self,opt_str):\n option=(self._short_opt.get(opt_str)or\n self._long_opt.get(opt_str))\n if option and option.container is not self:\n return option.container\n return None\n \n \n \n \n def _get_args(self,args):\n if args is None :\n return sys.argv[1:]\n else :\n return args[:]\n \n def parse_args(self,args=None ,values=None ):\n ''\n\n\n\n\n\n\n\n\n\n\n\n \n rargs=self._get_args(args)\n if values is None :\n values=self.get_default_values()\n \n \n \n \n \n \n \n \n \n \n self.rargs=rargs\n self.largs=largs=[]\n self.values=values\n \n try :\n stop=self._process_args(largs,rargs,values)\n except (BadOptionError,OptionValueError)as err:\n self.error(str(err))\n \n args=largs+rargs\n return self.check_values(values,args)\n \n def check_values(self,values,args):\n ''\n\n\n\n\n\n\n\n\n \n return (values,args)\n \n def _process_args(self,largs,rargs,values):\n ''\n\n\n\n\n\n\n\n \n while rargs:\n arg=rargs[0]\n \n \n \n if arg ==\"--\":\n del rargs[0]\n return\n elif arg[0:2]==\"--\":\n \n self._process_long_opt(rargs,values)\n elif arg[:1]==\"-\"and len(arg)>1:\n \n \n self._process_short_opts(rargs,values)\n elif self.allow_interspersed_args:\n largs.append(arg)\n del rargs[0]\n else :\n return\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n def _match_long_opt(self,opt):\n ''\n\n\n\n\n \n return _match_abbrev(opt,self._long_opt)\n \n def _process_long_opt(self,rargs,values):\n arg=rargs.pop(0)\n \n \n \n if\"=\"in arg:\n (opt,next_arg)=arg.split(\"=\",1)\n rargs.insert(0,next_arg)\n had_explicit_value=True\n else :\n opt=arg\n had_explicit_value=False\n \n opt=self._match_long_opt(opt)\n option=self._long_opt[opt]\n if option.takes_value():\n nargs=option.nargs\n if 
len(rargs)'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\x80'\n'\\x81'\n'\\x82'\n'\\x83'\n'\\x84'\n'\\x85'\n'\\x86'\n'\\x87'\n'\\x88'\n'\\x89'\n'\\x8a'\n'\\x8b'\n'\\x8c'\n'\\x8d'\n'\\x8e'\n'\\x8f'\n'\\x90'\n'\\x91'\n'\\x92'\n'\\x93'\n'\\x94'\n'\\x95'\n'\\x96'\n'\\x97'\n'\\x98'\n'\\x99'\n'\\x9a'\n'\\x9b'\n'\\x9c'\n'\\x9d'\n'\\x9e'\n'\\x9f'\n'\\xa0'\n'\\u0126'\n'\\u02d8'\n'\\xa3'\n'\\xa4'\n'\\ufffe'\n'\\u0124'\n'\\xa7'\n'\\xa8'\n'\\u0130'\n'\\u015e'\n'\\u011e'\n'\\u0134'\n'\\xad'\n'\\ufffe'\n'\\u017b'\n'\\xb0'\n'\\u0127'\n'\\xb2'\n'\\xb3'\n'\\xb4'\n'\\xb5'\n'\\u0125'\n'\\xb7'\n'\\xb8'\n'\\u0131'\n'\\u015f'\n'\\u011f'\n'\\u0135'\n'\\xbd'\n'\\ufffe'\n'\\u017c'\n'\\xc0'\n'\\xc1'\n'\\xc2'\n'\\ufffe'\n'\\xc4'\n'\\u010a'\n'\\u0108'\n'\\xc7'\n'\\xc8'\n'\\xc9'\n'\\xca'\n'\\xcb'\n'\\xcc'\n'\\xcd'\n'\\xce'\n'\\xcf'\n'\\ufffe'\n'\\xd1'\n'\\xd2'\n'\\xd3'\n'\\xd4'\n'\\u0120'\n'\\xd6'\n'\\xd7'\n'\\u011c'\n'\\xd9'\n'\\xda'\n'\\xdb'\n'\\xdc'\n'\\u016c'\n'\\u015c'\n'\\xdf'\n'\\xe0'\n'\\xe1'\n'\\xe2'\n'\\ufffe'\n'\\xe4'\n'\\u010b'\n'\\u0109'\n'\\xe7'\n'\\xe8'\n'\\xe9'\n'\\xea'\n'\\xeb'\n'\\xec'\n'\\xed'\n'\\xee'\n'\\xef'\n'\\ufffe'\n'\\xf1'\n'\\xf2'\n'\\xf3'\n'\\xf4'\n'\\u0121'\n'\\xf6'\n'\\xf7'\n'\\u011d'\n'\\xf9'\n'\\xfa'\n'\\xfb'\n'\\xfc'\n'\\u016d'\n'\\u015d'\n'\\u02d9'\n)\n\n\nencoding_table=codecs.charmap_build(decoding_table)\n"], "cmd": [".py", "''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\nimport string,sys\n\n__all__=[\"Cmd\"]\n\nPROMPT='(Cmd) '\nIDENTCHARS=string.ascii_letters+string.digits+'_'\n\nclass Cmd:\n ''\n\n\n\n\n\n\n\n\n\n \n prompt=PROMPT\n identchars=IDENTCHARS\n ruler='='\n lastcmd=''\n intro=None\n doc_leader=\"\"\n doc_header=\"Documented commands (type help ):\"\n misc_header=\"Miscellaneous help topics:\"\n undoc_header=\"Undocumented commands:\"\n nohelp=\"*** No help on %s\"\n use_rawinput=1\n \n def __init__(self,completekey='tab',stdin=None ,stdout=None ):\n ''\n\n\n\n\n\n\n\n\n \n if stdin is not None :\n self.stdin=stdin\n else :\n self.stdin=sys.stdin\n if stdout is not None :\n self.stdout=stdout\n else :\n self.stdout=sys.stdout\n self.cmdqueue=[]\n self.completekey=completekey\n \n def cmdloop(self,intro=None ):\n ''\n\n\n\n \n \n self.preloop()\n if self.use_rawinput and self.completekey:\n try :\n import readline\n self.old_completer=readline.get_completer()\n readline.set_completer(self.complete)\n readline.parse_and_bind(self.completekey+\": complete\")\n except ImportError:\n pass\n try :\n if intro is not None :\n self.intro=intro\n if self.intro:\n self.stdout.write(str(self.intro)+\"\\n\")\n stop=None\n while not stop:\n if self.cmdqueue:\n line=self.cmdqueue.pop(0)\n else :\n if self.use_rawinput:\n try :\n line=input(self.prompt)\n except EOFError:\n line='EOF'\n else :\n self.stdout.write(self.prompt)\n self.stdout.flush()\n line=self.stdin.readline()\n if not len(line):\n line='EOF'\n else :\n line=line.rstrip('\\r\\n')\n line=self.precmd(line)\n stop=self.onecmd(line)\n stop=self.postcmd(stop,line)\n self.postloop()\n finally :\n if self.use_rawinput and self.completekey:\n try :\n import readline\n readline.set_completer(self.old_completer)\n except ImportError:\n pass\n \n \n def precmd(self,line):\n ''\n\n\n \n return line\n \n 
def postcmd(self,stop,line):\n ''\n return stop\n \n def preloop(self):\n ''\n pass\n \n def postloop(self):\n ''\n\n\n \n pass\n \n def parseline(self,line):\n ''\n\n\n \n line=line.strip()\n if not line:\n return None ,None ,line\n elif line[0]=='?':\n line='help '+line[1:]\n elif line[0]=='!':\n if hasattr(self,'do_shell'):\n line='shell '+line[1:]\n else :\n return None ,None ,line\n i,n=0,len(line)\n while i 0:\n cmd,args,foo=self.parseline(line)\n if cmd =='':\n compfunc=self.completedefault\n else :\n try :\n compfunc=getattr(self,'complete_'+cmd)\n except AttributeError:\n compfunc=self.completedefault\n else :\n compfunc=self.completenames\n self.completion_matches=compfunc(text,line,begidx,endidx)\n try :\n return self.completion_matches[state]\n except IndexError:\n return None\n \n def get_names(self):\n \n \n return dir(self.__class__)\n \n def complete_help(self,*args):\n commands=set(self.completenames(*args))\n topics=set(a[5:]for a in self.get_names()\n if a.startswith('help_'+args[0]))\n return list(commands |topics)\n \n def do_help(self,arg):\n ''\n if arg:\n \n try :\n func=getattr(self,'help_'+arg)\n except AttributeError:\n try :\n doc=getattr(self,'do_'+arg).__doc__\n if doc:\n self.stdout.write(\"%s\\n\"%str(doc))\n return\n except AttributeError:\n pass\n self.stdout.write(\"%s\\n\"%str(self.nohelp %(arg,)))\n return\n func()\n else :\n names=self.get_names()\n cmds_doc=[]\n cmds_undoc=[]\n help={}\n for name in names:\n if name[:5]=='help_':\n help[name[5:]]=1\n names.sort()\n \n prevname=''\n for name in names:\n if name[:3]=='do_':\n if name ==prevname:\n continue\n prevname=name\n cmd=name[3:]\n if cmd in help:\n cmds_doc.append(cmd)\n del help[cmd]\n elif getattr(self,name).__doc__:\n cmds_doc.append(cmd)\n else :\n cmds_undoc.append(cmd)\n self.stdout.write(\"%s\\n\"%str(self.doc_leader))\n self.print_topics(self.doc_header,cmds_doc,15,80)\n self.print_topics(self.misc_header,list(help.keys()),15,80)\n self.print_topics(self.undoc_header,cmds_undoc,15,80)\n \n def print_topics(self,header,cmds,cmdlen,maxcol):\n if cmds:\n self.stdout.write(\"%s\\n\"%str(header))\n if self.ruler:\n self.stdout.write(\"%s\\n\"%str(self.ruler *len(header)))\n self.columnize(cmds,maxcol -1)\n self.stdout.write(\"\\n\")\n \n def columnize(self,list,displaywidth=80):\n ''\n\n\n\n \n if not list:\n self.stdout.write(\"\\n\")\n return\n \n nonstrings=[i for i in range(len(list))\n if not isinstance(list[i],str)]\n if nonstrings:\n raise TypeError(\"list[i] not a string for i in %s\"\n %\", \".join(map(str,nonstrings)))\n size=len(list)\n if size ==1:\n self.stdout.write('%s\\n'%str(list[0]))\n return\n \n for nrows in range(1,len(list)):\n ncols=(size+nrows -1)//nrows\n colwidths=[]\n totwidth=-2\n for col in range(ncols):\n colwidth=0\n for row in range(nrows):\n i=row+nrows *col\n if i >=size:\n break\n x=list[i]\n colwidth=max(colwidth,len(x))\n colwidths.append(colwidth)\n totwidth +=colwidth+2\n if totwidth >displaywidth:\n break\n if totwidth <=displaywidth:\n break\n else :\n nrows=len(list)\n ncols=1\n colwidths=[0]\n for row in range(nrows):\n texts=[]\n for col in range(ncols):\n i=row+nrows *col\n if i >=size:\n x=\"\"\n else :\n x=list[i]\n texts.append(x)\n while texts and not texts[-1]:\n del texts[-1]\n for col in range(len(texts)):\n texts[col]=texts[col].ljust(colwidths[col])\n self.stdout.write(\"%s\\n\"%str(\" \".join(texts)))\n"], "_random": [".py", "from browser import window,alert\n\ndef _randint(a,b):\n return int(window.Math.random()*(b -a+1)+a)\n \ndef 
_rand_with_seed(x,rand_obj):\n\n\n degrees=rand_obj._state %360\n x=window.Math.sin(degrees /(2 *window.Math.PI))*10000\n \n \n \n \n \n if not hasattr(rand_obj,'incr'):\n rand_obj.incr=1\n rand_obj._state +=rand_obj.incr\n return x -window.Math.floor(x)\n \ndef _urandom(n,rand_obj=None ):\n ''\n \n \n if rand_obj is None or rand_obj._state is None :\n randbytes=[_randint(0,255)for i in range(n)]\n else :\n randbytes=[]\n for i in range(n):\n randbytes.append(int(256 *_rand_with_seed(i,rand_obj)))\n return bytes(randbytes)\n \nclass Random:\n ''\n\n\n\n\n\n\n\n\n\n\n \n \n \n \n \n \n \n VERSION=3\n \n def __init__(self,x=None ):\n ''\n\n\n \n \n self._state=x\n \n def seed(self,a=None ,version=2):\n ''\n\n\n\n\n\n\n\n\n\n \n \n self._state=a\n self.gauss_next=None\n \n def getstate(self):\n ''\n return self._state\n \n def setstate(self,state):\n ''\n self._state=state\n \n def random(self):\n ''\n return window.Math.random()\n \n def getrandbits(self,k):\n ''\n if k <=0:\n raise ValueError('number of bits must be greater than zero')\n if k !=int(k):\n raise TypeError('number of bits should be an integer')\n numbytes=(k+7)//8\n x=int.from_bytes(_urandom(numbytes,self),'big')\n \n return x >>(numbytes *8 -k)\n"], "encodings.mac_centeuro": [".py", "''\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_table)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False ):\n return codecs.charmap_encode(input,self.errors,encoding_table)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False ):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='mac-centeuro',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n \n \ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' 
'\n'!'\n'\"'\n'#'\n'$'\n'%'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\xc4'\n'\\u0100'\n'\\u0101'\n'\\xc9'\n'\\u0104'\n'\\xd6'\n'\\xdc'\n'\\xe1'\n'\\u0105'\n'\\u010c'\n'\\xe4'\n'\\u010d'\n'\\u0106'\n'\\u0107'\n'\\xe9'\n'\\u0179'\n'\\u017a'\n'\\u010e'\n'\\xed'\n'\\u010f'\n'\\u0112'\n'\\u0113'\n'\\u0116'\n'\\xf3'\n'\\u0117'\n'\\xf4'\n'\\xf6'\n'\\xf5'\n'\\xfa'\n'\\u011a'\n'\\u011b'\n'\\xfc'\n'\\u2020'\n'\\xb0'\n'\\u0118'\n'\\xa3'\n'\\xa7'\n'\\u2022'\n'\\xb6'\n'\\xdf'\n'\\xae'\n'\\xa9'\n'\\u2122'\n'\\u0119'\n'\\xa8'\n'\\u2260'\n'\\u0123'\n'\\u012e'\n'\\u012f'\n'\\u012a'\n'\\u2264'\n'\\u2265'\n'\\u012b'\n'\\u0136'\n'\\u2202'\n'\\u2211'\n'\\u0142'\n'\\u013b'\n'\\u013c'\n'\\u013d'\n'\\u013e'\n'\\u0139'\n'\\u013a'\n'\\u0145'\n'\\u0146'\n'\\u0143'\n'\\xac'\n'\\u221a'\n'\\u0144'\n'\\u0147'\n'\\u2206'\n'\\xab'\n'\\xbb'\n'\\u2026'\n'\\xa0'\n'\\u0148'\n'\\u0150'\n'\\xd5'\n'\\u0151'\n'\\u014c'\n'\\u2013'\n'\\u2014'\n'\\u201c'\n'\\u201d'\n'\\u2018'\n'\\u2019'\n'\\xf7'\n'\\u25ca'\n'\\u014d'\n'\\u0154'\n'\\u0155'\n'\\u0158'\n'\\u2039'\n'\\u203a'\n'\\u0159'\n'\\u0156'\n'\\u0157'\n'\\u0160'\n'\\u201a'\n'\\u201e'\n'\\u0161'\n'\\u015a'\n'\\u015b'\n'\\xc1'\n'\\u0164'\n'\\u0165'\n'\\xcd'\n'\\u017d'\n'\\u017e'\n'\\u016a'\n'\\xd3'\n'\\xd4'\n'\\u016b'\n'\\u016e'\n'\\xda'\n'\\u016f'\n'\\u0170'\n'\\u0171'\n'\\u0172'\n'\\u0173'\n'\\xdd'\n'\\xfd'\n'\\u0137'\n'\\u017b'\n'\\u0141'\n'\\u017c'\n'\\u0122'\n'\\u02c7'\n)\n\n\nencoding_table=codecs.charmap_build(decoding_table)\n"], "builtins": [".js", "var $module = (function(){\n var obj = {__class__:__BRYTHON__.$ModuleDict,__name__:'builtins'}\n var builtin_names = ['ArithmeticError', 'AssertionError', 'AttributeError', \n 'BaseException', 'BlockingIOError', 'BrokenPipeError', 'BufferError', \n 'BytesWarning', 'ChildProcessError', 'ConnectionAbortedError', \n 'ConnectionError', 'ConnectionRefusedError', 'ConnectionResetError', \n 'DeprecationWarning', 'EOFError', 'Ellipsis', 'EnvironmentError', 'Exception', \n 'False', 'FileExistsError', 'FileNotFoundError', 'FloatingPointError', \n 'FutureWarning', 'GeneratorExit', 'IOError', 'ImportError', 'ImportWarning', \n 'IndentationError', 'IndexError', 'InterruptedError', 'IsADirectoryError',\n 'KeyError', 'KeyboardInterrupt', 'LookupError', 'MemoryError', 'NameError', \n 'None', 'NotADirectoryError', 'NotImplemented', 'NotImplementedError', \n 'OSError', 'OverflowError', 'PendingDeprecationWarning', 'PermissionError', \n 'ProcessLookupError', 'ReferenceError', 'ResourceWarning', 'RuntimeError', \n 'RuntimeWarning', 'StopIteration', 'SyntaxError', 'SyntaxWarning', \n 'SystemError', 'SystemExit', 'TabError', 'TimeoutError', 'True', 'TypeError', \n 'UnboundLocalError', 'UnicodeDecodeError', 'UnicodeEncodeError', \n 'UnicodeError', 'UnicodeTranslateError', 'UnicodeWarning', 'UserWarning', \n 'ValueError', 'Warning', 'WindowsError', 'ZeroDivisionError', '_', \n '__build_class__', '__debug__', '__doc__', '__import__', '__name__', \n '__package__', 'abs', 'all', 'any', 'ascii', 'bin', 'bool', 'bytearray', \n 'bytes','callable', 'chr', 'classmethod', 'compile', 'complex', 'copyright', \n 'credits','delattr', 'dict', 'dir', 
'divmod', 'enumerate', 'eval', 'exec', \n 'exit', 'filter', 'float', 'format', 'frozenset', 'getattr', 'globals', \n 'hasattr', 'hash', 'help', 'hex', 'id', 'input', 'int', 'isinstance', \n 'issubclass', 'iter', 'len', 'license', 'list', 'locals', 'map', 'max', \n 'memoryview', 'min', 'next', 'object', 'oct', 'open', 'ord', 'pow', 'print', \n 'property', 'quit', 'range', 'repr', 'reversed', 'round', 'set', 'setattr', \n 'slice', 'sorted', 'staticmethod', 'str', 'sum', 'super', 'tuple', 'type', \n 'vars', 'zip']\n for(var i=0, _len_i = builtin_names.length; i < _len_i;i++){\n try{eval(\"obj['\"+builtin_names[i]+\"']=__BRYTHON__.builtins.\"+builtin_names[i])}\n catch(err){if (__BRYTHON__.$debug) {console.log(err)}}\n }\n return obj\n})()\n"], "browser": [".py", "import javascript\n\nfrom _browser import *\n\nfrom .local_storage import LocalStorage\nfrom .session_storage import SessionStorage\nfrom .object_storage import ObjectStorage\n\nWebSocket=window.WebSocket.new", 1], "encodings.cp863": [".py", "''\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_map)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False ):\n return codecs.charmap_encode(input,self.errors,encoding_map)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False ):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='cp863',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n 
\ndecoding_map=codecs.make_identity_dict(range(256))\ndecoding_map.update({\n0x0080:0x00c7,\n0x0081:0x00fc,\n0x0082:0x00e9,\n0x0083:0x00e2,\n0x0084:0x00c2,\n0x0085:0x00e0,\n0x0086:0x00b6,\n0x0087:0x00e7,\n0x0088:0x00ea,\n0x0089:0x00eb,\n0x008a:0x00e8,\n0x008b:0x00ef,\n0x008c:0x00ee,\n0x008d:0x2017,\n0x008e:0x00c0,\n0x008f:0x00a7,\n0x0090:0x00c9,\n0x0091:0x00c8,\n0x0092:0x00ca,\n0x0093:0x00f4,\n0x0094:0x00cb,\n0x0095:0x00cf,\n0x0096:0x00fb,\n0x0097:0x00f9,\n0x0098:0x00a4,\n0x0099:0x00d4,\n0x009a:0x00dc,\n0x009b:0x00a2,\n0x009c:0x00a3,\n0x009d:0x00d9,\n0x009e:0x00db,\n0x009f:0x0192,\n0x00a0:0x00a6,\n0x00a1:0x00b4,\n0x00a2:0x00f3,\n0x00a3:0x00fa,\n0x00a4:0x00a8,\n0x00a5:0x00b8,\n0x00a6:0x00b3,\n0x00a7:0x00af,\n0x00a8:0x00ce,\n0x00a9:0x2310,\n0x00aa:0x00ac,\n0x00ab:0x00bd,\n0x00ac:0x00bc,\n0x00ad:0x00be,\n0x00ae:0x00ab,\n0x00af:0x00bb,\n0x00b0:0x2591,\n0x00b1:0x2592,\n0x00b2:0x2593,\n0x00b3:0x2502,\n0x00b4:0x2524,\n0x00b5:0x2561,\n0x00b6:0x2562,\n0x00b7:0x2556,\n0x00b8:0x2555,\n0x00b9:0x2563,\n0x00ba:0x2551,\n0x00bb:0x2557,\n0x00bc:0x255d,\n0x00bd:0x255c,\n0x00be:0x255b,\n0x00bf:0x2510,\n0x00c0:0x2514,\n0x00c1:0x2534,\n0x00c2:0x252c,\n0x00c3:0x251c,\n0x00c4:0x2500,\n0x00c5:0x253c,\n0x00c6:0x255e,\n0x00c7:0x255f,\n0x00c8:0x255a,\n0x00c9:0x2554,\n0x00ca:0x2569,\n0x00cb:0x2566,\n0x00cc:0x2560,\n0x00cd:0x2550,\n0x00ce:0x256c,\n0x00cf:0x2567,\n0x00d0:0x2568,\n0x00d1:0x2564,\n0x00d2:0x2565,\n0x00d3:0x2559,\n0x00d4:0x2558,\n0x00d5:0x2552,\n0x00d6:0x2553,\n0x00d7:0x256b,\n0x00d8:0x256a,\n0x00d9:0x2518,\n0x00da:0x250c,\n0x00db:0x2588,\n0x00dc:0x2584,\n0x00dd:0x258c,\n0x00de:0x2590,\n0x00df:0x2580,\n0x00e0:0x03b1,\n0x00e1:0x00df,\n0x00e2:0x0393,\n0x00e3:0x03c0,\n0x00e4:0x03a3,\n0x00e5:0x03c3,\n0x00e6:0x00b5,\n0x00e7:0x03c4,\n0x00e8:0x03a6,\n0x00e9:0x0398,\n0x00ea:0x03a9,\n0x00eb:0x03b4,\n0x00ec:0x221e,\n0x00ed:0x03c6,\n0x00ee:0x03b5,\n0x00ef:0x2229,\n0x00f0:0x2261,\n0x00f1:0x00b1,\n0x00f2:0x2265,\n0x00f3:0x2264,\n0x00f4:0x2320,\n0x00f5:0x2321,\n0x00f6:0x00f7,\n0x00f7:0x2248,\n0x00f8:0x00b0,\n0x00f9:0x2219,\n0x00fa:0x00b7,\n0x00fb:0x221a,\n0x00fc:0x207f,\n0x00fd:0x00b2,\n0x00fe:0x25a0,\n0x00ff:0x00a0,\n})\n\n\n\ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' 
'\n'!'\n'\"'\n'#'\n'$'\n'%'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\xc7'\n'\\xfc'\n'\\xe9'\n'\\xe2'\n'\\xc2'\n'\\xe0'\n'\\xb6'\n'\\xe7'\n'\\xea'\n'\\xeb'\n'\\xe8'\n'\\xef'\n'\\xee'\n'\\u2017'\n'\\xc0'\n'\\xa7'\n'\\xc9'\n'\\xc8'\n'\\xca'\n'\\xf4'\n'\\xcb'\n'\\xcf'\n'\\xfb'\n'\\xf9'\n'\\xa4'\n'\\xd4'\n'\\xdc'\n'\\xa2'\n'\\xa3'\n'\\xd9'\n'\\xdb'\n'\\u0192'\n'\\xa6'\n'\\xb4'\n'\\xf3'\n'\\xfa'\n'\\xa8'\n'\\xb8'\n'\\xb3'\n'\\xaf'\n'\\xce'\n'\\u2310'\n'\\xac'\n'\\xbd'\n'\\xbc'\n'\\xbe'\n'\\xab'\n'\\xbb'\n'\\u2591'\n'\\u2592'\n'\\u2593'\n'\\u2502'\n'\\u2524'\n'\\u2561'\n'\\u2562'\n'\\u2556'\n'\\u2555'\n'\\u2563'\n'\\u2551'\n'\\u2557'\n'\\u255d'\n'\\u255c'\n'\\u255b'\n'\\u2510'\n'\\u2514'\n'\\u2534'\n'\\u252c'\n'\\u251c'\n'\\u2500'\n'\\u253c'\n'\\u255e'\n'\\u255f'\n'\\u255a'\n'\\u2554'\n'\\u2569'\n'\\u2566'\n'\\u2560'\n'\\u2550'\n'\\u256c'\n'\\u2567'\n'\\u2568'\n'\\u2564'\n'\\u2565'\n'\\u2559'\n'\\u2558'\n'\\u2552'\n'\\u2553'\n'\\u256b'\n'\\u256a'\n'\\u2518'\n'\\u250c'\n'\\u2588'\n'\\u2584'\n'\\u258c'\n'\\u2590'\n'\\u2580'\n'\\u03b1'\n'\\xdf'\n'\\u0393'\n'\\u03c0'\n'\\u03a3'\n'\\u03c3'\n'\\xb5'\n'\\u03c4'\n'\\u03a6'\n'\\u0398'\n'\\u03a9'\n'\\u03b4'\n'\\u221e'\n'\\u03c6'\n'\\u03b5'\n'\\u2229'\n'\\u2261'\n'\\xb1'\n'\\u2265'\n'\\u2264'\n'\\u2320'\n'\\u2321'\n'\\xf7'\n'\\u2248'\n'\\xb0'\n'\\u2219'\n'\\xb7'\n'\\u221a'\n'\\u207f'\n'\\xb2'\n'\\u25a0'\n'\\xa0'\n)\n\n\n\nencoding_map={\n0x0000:0x0000,\n0x0001:0x0001,\n0x0002:0x0002,\n0x0003:0x0003,\n0x0004:0x0004,\n0x0005:0x0005,\n0x0006:0x0006,\n0x0007:0x0007,\n0x0008:0x0008,\n0x0009:0x0009,\n0x000a:0x000a,\n0x000b:0x000b,\n0x000c:0x000c,\n0x000d:0x000d,\n0x000e:0x000e,\n0x000f:0x000f,\n0x0010:0x0010,\n0x0011:0x0011,\n0x0012:0x0012,\n0x0013:0x0013,\n0x0014:0x0014,\n0x0015:0x0015,\n0x0016:0x0016,\n0x0017:0x0017,\n0x0018:0x0018,\n0x0019:0x0019,\n0x001a:0x001a,\n0x001b:0x001b,\n0x001c:0x001c,\n0x001d:0x001d,\n0x001e:0x001e,\n0x001f:0x001f,\n0x0020:0x0020,\n0x0021:0x0021,\n0x0022:0x0022,\n0x0023:0x0023,\n0x0024:0x0024,\n0x0025:0x0025,\n0x0026:0x0026,\n0x0027:0x0027,\n0x0028:0x0028,\n0x0029:0x0029,\n0x002a:0x002a,\n0x002b:0x002b,\n0x002c:0x002c,\n0x002d:0x002d,\n0x002e:0x002e,\n0x002f:0x002f,\n0x0030:0x0030,\n0x0031:0x0031,\n0x0032:0x0032,\n0x0033:0x0033,\n0x0034:0x0034,\n0x0035:0x0035,\n0x0036:0x0036,\n0x0037:0x0037,\n0x0038:0x0038,\n0x0039:0x0039,\n0x003a:0x003a,\n0x003b:0x003b,\n0x003c:0x003c,\n0x003d:0x003d,\n0x003e:0x003e,\n0x003f:0x003f,\n0x0040:0x0040,\n0x0041:0x0041,\n0x0042:0x0042,\n0x0043:0x0043,\n0x0044:0x0044,\n0x0045:0x0045,\n0x0046:0x0046,\n0x0047:0x0047,\n0x0048:0x0048,\n0x0049:0x0049,\n0x004a:0x004a,\n0x004b:0x004b,\n0x004c:0x004c,\n0x004d:0x004d,\n0x004e:0x004e,\n0x004f:0x004f,\n0x0050:0x0050,\n0x0051:0x0051,\n0x0052:0x0052,\n0x0053:0x0053,\n0x0054:0x0054,\n0x0055:0x0055,\n0x0056:0x0056,\n0x0057:0x0057,\n0x0058:0x0058,\n0x0059:0x0059,\n0x005a:0x005a,\n0x005b:0x005b,\n0x005c:0x005c,\n0x005d:0x005d,\n0x005e:0x005e,\n0x005f:0x005f,\n0x0060:0x0060,\n0x0061:0x0061,\n0x0062:0x0062,\n0x0063:0x0063,\n0x0064:0x0064,\n0x0065:0x0065,\n0x0066:0x0066,\n0x0067:0x0067,\n0x0068:0x0068,\n0x0069:0x0069,\n0x006a:0x006a,\n0x006b:0x006b,\n0x006c:0x006
c,\n0x006d:0x006d,\n0x006e:0x006e,\n0x006f:0x006f,\n0x0070:0x0070,\n0x0071:0x0071,\n0x0072:0x0072,\n0x0073:0x0073,\n0x0074:0x0074,\n0x0075:0x0075,\n0x0076:0x0076,\n0x0077:0x0077,\n0x0078:0x0078,\n0x0079:0x0079,\n0x007a:0x007a,\n0x007b:0x007b,\n0x007c:0x007c,\n0x007d:0x007d,\n0x007e:0x007e,\n0x007f:0x007f,\n0x00a0:0x00ff,\n0x00a2:0x009b,\n0x00a3:0x009c,\n0x00a4:0x0098,\n0x00a6:0x00a0,\n0x00a7:0x008f,\n0x00a8:0x00a4,\n0x00ab:0x00ae,\n0x00ac:0x00aa,\n0x00af:0x00a7,\n0x00b0:0x00f8,\n0x00b1:0x00f1,\n0x00b2:0x00fd,\n0x00b3:0x00a6,\n0x00b4:0x00a1,\n0x00b5:0x00e6,\n0x00b6:0x0086,\n0x00b7:0x00fa,\n0x00b8:0x00a5,\n0x00bb:0x00af,\n0x00bc:0x00ac,\n0x00bd:0x00ab,\n0x00be:0x00ad,\n0x00c0:0x008e,\n0x00c2:0x0084,\n0x00c7:0x0080,\n0x00c8:0x0091,\n0x00c9:0x0090,\n0x00ca:0x0092,\n0x00cb:0x0094,\n0x00ce:0x00a8,\n0x00cf:0x0095,\n0x00d4:0x0099,\n0x00d9:0x009d,\n0x00db:0x009e,\n0x00dc:0x009a,\n0x00df:0x00e1,\n0x00e0:0x0085,\n0x00e2:0x0083,\n0x00e7:0x0087,\n0x00e8:0x008a,\n0x00e9:0x0082,\n0x00ea:0x0088,\n0x00eb:0x0089,\n0x00ee:0x008c,\n0x00ef:0x008b,\n0x00f3:0x00a2,\n0x00f4:0x0093,\n0x00f7:0x00f6,\n0x00f9:0x0097,\n0x00fa:0x00a3,\n0x00fb:0x0096,\n0x00fc:0x0081,\n0x0192:0x009f,\n0x0393:0x00e2,\n0x0398:0x00e9,\n0x03a3:0x00e4,\n0x03a6:0x00e8,\n0x03a9:0x00ea,\n0x03b1:0x00e0,\n0x03b4:0x00eb,\n0x03b5:0x00ee,\n0x03c0:0x00e3,\n0x03c3:0x00e5,\n0x03c4:0x00e7,\n0x03c6:0x00ed,\n0x2017:0x008d,\n0x207f:0x00fc,\n0x2219:0x00f9,\n0x221a:0x00fb,\n0x221e:0x00ec,\n0x2229:0x00ef,\n0x2248:0x00f7,\n0x2261:0x00f0,\n0x2264:0x00f3,\n0x2265:0x00f2,\n0x2310:0x00a9,\n0x2320:0x00f4,\n0x2321:0x00f5,\n0x2500:0x00c4,\n0x2502:0x00b3,\n0x250c:0x00da,\n0x2510:0x00bf,\n0x2514:0x00c0,\n0x2518:0x00d9,\n0x251c:0x00c3,\n0x2524:0x00b4,\n0x252c:0x00c2,\n0x2534:0x00c1,\n0x253c:0x00c5,\n0x2550:0x00cd,\n0x2551:0x00ba,\n0x2552:0x00d5,\n0x2553:0x00d6,\n0x2554:0x00c9,\n0x2555:0x00b8,\n0x2556:0x00b7,\n0x2557:0x00bb,\n0x2558:0x00d4,\n0x2559:0x00d3,\n0x255a:0x00c8,\n0x255b:0x00be,\n0x255c:0x00bd,\n0x255d:0x00bc,\n0x255e:0x00c6,\n0x255f:0x00c7,\n0x2560:0x00cc,\n0x2561:0x00b5,\n0x2562:0x00b6,\n0x2563:0x00b9,\n0x2564:0x00d1,\n0x2565:0x00d2,\n0x2566:0x00cb,\n0x2567:0x00cf,\n0x2568:0x00d0,\n0x2569:0x00ca,\n0x256a:0x00d8,\n0x256b:0x00d7,\n0x256c:0x00ce,\n0x2580:0x00df,\n0x2584:0x00dc,\n0x2588:0x00db,\n0x258c:0x00dd,\n0x2590:0x00de,\n0x2591:0x00b0,\n0x2592:0x00b1,\n0x2593:0x00b2,\n0x25a0:0x00fe,\n}\n"], "gettext": [".py", "''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\nimport locale,copy,io,os,re,struct,sys\nfrom errno import ENOENT\n\n\n__all__=['NullTranslations','GNUTranslations','Catalog',\n'find','translation','install','textdomain','bindtextdomain',\n'dgettext','dngettext','gettext','ngettext',\n]\n\n_default_localedir=os.path.join(sys.base_prefix,'share','locale')\n\n\ndef c2py(plural):\n ''\n\n \n \n import token,tokenize\n tokens=tokenize.generate_tokens(io.StringIO(plural).readline)\n try :\n danger=[x for x in tokens if x[0]==token.NAME and x[1]!='n']\n except tokenize.TokenError:\n raise ValueError('plural forms expression error, maybe unbalanced parenthesis')\n else :\n if danger:\n raise ValueError('plural forms expression could be dangerous')\n \n \n plural=plural.replace('&&',' and ')\n plural=plural.replace('||',' or ')\n \n expr=re.compile(r'\\!([^=])')\n plural=expr.sub(' not \\\\1',plural)\n \n \n \n expr=re.compile(r'(.*?)\\?(.*?):(.*)')\n def repl(x):\n return\"(%s if %s else %s)\"%(x.group(2),x.group(1),\n expr.sub(repl,x.group(3)))\n \n \n stack=['']\n for c in plural:\n if c =='(':\n 
stack.append('')\n elif c ==')':\n if len(stack)==1:\n \n \n \n raise ValueError('unbalanced parenthesis in plural form')\n s=expr.sub(repl,stack.pop())\n stack[-1]+='(%s)'%s\n else :\n stack[-1]+=c\n plural=expr.sub(repl,stack.pop())\n \n return eval('lambda n: int(%s)'%plural)\n \n \n \ndef _expand_lang(loc):\n loc=locale.normalize(loc)\n COMPONENT_CODESET=1 <<0\n COMPONENT_TERRITORY=1 <<1\n COMPONENT_MODIFIER=1 <<2\n \n mask=0\n pos=loc.find('@')\n if pos >=0:\n modifier=loc[pos:]\n loc=loc[:pos]\n mask |=COMPONENT_MODIFIER\n else :\n modifier=''\n pos=loc.find('.')\n if pos >=0:\n codeset=loc[pos:]\n loc=loc[:pos]\n mask |=COMPONENT_CODESET\n else :\n codeset=''\n pos=loc.find('_')\n if pos >=0:\n territory=loc[pos:]\n loc=loc[:pos]\n mask |=COMPONENT_TERRITORY\n else :\n territory=''\n language=loc\n ret=[]\n for i in range(mask+1):\n if not (i&~mask):\n val=language\n if i&COMPONENT_TERRITORY:val +=territory\n if i&COMPONENT_CODESET:val +=codeset\n if i&COMPONENT_MODIFIER:val +=modifier\n ret.append(val)\n ret.reverse()\n return ret\n \n \n \nclass NullTranslations:\n def __init__(self,fp=None ):\n self._info={}\n self._charset=None\n self._output_charset=None\n self._fallback=None\n if fp is not None :\n self._parse(fp)\n \n def _parse(self,fp):\n pass\n \n def add_fallback(self,fallback):\n if self._fallback:\n self._fallback.add_fallback(fallback)\n else :\n self._fallback=fallback\n \n def gettext(self,message):\n if self._fallback:\n return self._fallback.gettext(message)\n return message\n \n def lgettext(self,message):\n if self._fallback:\n return self._fallback.lgettext(message)\n return message\n \n def ngettext(self,msgid1,msgid2,n):\n if self._fallback:\n return self._fallback.ngettext(msgid1,msgid2,n)\n if n ==1:\n return msgid1\n else :\n return msgid2\n \n def lngettext(self,msgid1,msgid2,n):\n if self._fallback:\n return self._fallback.lngettext(msgid1,msgid2,n)\n if n ==1:\n return msgid1\n else :\n return msgid2\n \n def info(self):\n return self._info\n \n def charset(self):\n return self._charset\n \n def output_charset(self):\n return self._output_charset\n \n def set_output_charset(self,charset):\n self._output_charset=charset\n \n def install(self,names=None ):\n import builtins\n builtins.__dict__['_']=self.gettext\n if hasattr(names,\"__contains__\"):\n if\"gettext\"in names:\n builtins.__dict__['gettext']=builtins.__dict__['_']\n if\"ngettext\"in names:\n builtins.__dict__['ngettext']=self.ngettext\n if\"lgettext\"in names:\n builtins.__dict__['lgettext']=self.lgettext\n if\"lngettext\"in names:\n builtins.__dict__['lngettext']=self.lngettext\n \n \nclass GNUTranslations(NullTranslations):\n\n LE_MAGIC=0x950412de\n BE_MAGIC=0xde120495\n \n def _parse(self,fp):\n ''\n unpack=struct.unpack\n filename=getattr(fp,'name','')\n \n \n self._catalog=catalog={}\n self.plural=lambda n:int(n !=1)\n buf=fp.read()\n buflen=len(buf)\n \n magic=unpack('4I',buf[4:20])\n ii='>II'\n else :\n raise OSError(0,'Bad magic number',filename)\n \n \n for i in range(0,msgcount):\n mlen,moff=unpack(ii,buf[masteridx:masteridx+8])\n mend=moff+mlen\n tlen,toff=unpack(ii,buf[transidx:transidx+8])\n tend=toff+tlen\n if mend self.maxlen:\n self.popleft()\n \n def appendleft(self,x):\n self.state +=1\n self.leftndx -=1\n if self.leftndx ==-1:\n newblock=[None ]*BLOCKSIZ\n self.left[LFTLNK]=newblock\n newblock[RGTLNK]=self.left\n self.left=newblock\n self.leftndx=n -1\n self.length +=1\n self.left[self.leftndx]=x\n if self.maxlen is not None and self.length >self.maxlen:\n self.pop()\n \n def 
extend(self,iterable):\n if iterable is self:\n iterable=list(iterable)\n for elem in iterable:\n self.append(elem)\n \n def extendleft(self,iterable):\n if iterable is self:\n iterable=list(iterable)\n for elem in iterable:\n self.appendleft(elem)\n \n def pop(self):\n if self.left is self.right and self.leftndx >self.rightndx:\n \n raise IndexError(\"pop from an empty deque\")\n x=self.right[self.rightndx]\n self.right[self.rightndx]=None\n self.length -=1\n self.rightndx -=1\n self.state +=1\n if self.rightndx ==-1:\n prevblock=self.right[LFTLNK]\n if prevblock is None :\n \n self.rightndx=n //2\n self.leftndx=n //2+1\n else :\n prevblock[RGTLNK]=None\n self.right[LFTLNK]=None\n self.right=prevblock\n self.rightndx=n -1\n return x\n \n def popleft(self):\n if self.left is self.right and self.leftndx >self.rightndx:\n \n raise IndexError(\"pop from an empty deque\")\n x=self.left[self.leftndx]\n self.left[self.leftndx]=None\n self.length -=1\n self.leftndx +=1\n self.state +=1\n if self.leftndx ==n:\n prevblock=self.left[RGTLNK]\n if prevblock is None :\n \n self.rightndx=n //2\n self.leftndx=n //2+1\n else :\n prevblock[LFTLNK]=None\n self.left[RGTLNK]=None\n self.left=prevblock\n self.leftndx=0\n return x\n \n def count(self,value):\n c=0\n for item in self:\n if item ==value:\n c +=1\n return c\n \n def remove(self,value):\n \n for i in range(len(self)):\n if self[i]==value:\n del self[i]\n return\n raise ValueError(\"deque.remove(x): x not in deque\")\n \n def rotate(self,n=1):\n length=len(self)\n if length ==0:\n return\n halflen=(length+1)>>1\n if n >halflen or n <-halflen:\n n %=length\n if n >halflen:\n n -=length\n elif n <-halflen:\n n +=length\n while n >0:\n self.appendleft(self.pop())\n n -=1\n while n <0:\n self.append(self.popleft())\n n +=1\n \n def reverse(self):\n ''\n leftblock=self.left\n rightblock=self.right\n leftindex=self.leftndx\n rightindex=self.rightndx\n for i in range(self.length //2):\n \n assert leftblock !=rightblock or leftindex =0:\n block=self.left\n while block:\n l,r=0,n\n if block is self.left:\n l=self.leftndx\n if block is self.right:\n r=self.rightndx+1\n span=r -l\n if index =negative_span:\n return block,r+index\n index -=negative_span\n block=block[LFTLNK]\n raise IndexError(\"deque index out of range\")\n \n def __getitem__(self,index):\n block,index=self.__getref(index)\n return block[index]\n \n def __setitem__(self,index,value):\n block,index=self.__getref(index)\n block[index]=value\n \n def __delitem__(self,index):\n length=len(self)\n if index >=0:\n if index >=length:\n raise IndexError(\"deque index out of range\")\n self.rotate(-index)\n self.popleft()\n self.rotate(index)\n else :\n \n index=index ^(2 **31)\n if index >=length:\n raise IndexError(\"deque index out of range\")\n self.rotate(index)\n self.pop()\n self.rotate(-index)\n \n def __reduce_ex__(self,proto):\n return type(self),(list(self),self.maxlen)\n \n def __hash__(self):\n \n raise TypeError(\"deque objects are unhashable\")\n \n def __copy__(self):\n return self.__class__(self,self.maxlen)\n \n \n def __eq__(self,other):\n if isinstance(other,deque):\n return list(self)==list(other)\n else :\n return NotImplemented\n \n def __ne__(self,other):\n if isinstance(other,deque):\n return list(self)!=list(other)\n else :\n return NotImplemented\n \n def __lt__(self,other):\n if isinstance(other,deque):\n return list(self)list(other)\n else :\n return NotImplemented\n \n def __ge__(self,other):\n if isinstance(other,deque):\n return list(self)>=list(other)\n else :\n return 
NotImplemented\n \n def __iadd__(self,other):\n self.extend(other)\n return self\n \n \nclass deque_iterator(object):\n\n def __init__(self,deq,itergen):\n self.counter=len(deq)\n def giveup():\n self.counter=0\n \n raise RuntimeError(\"deque mutated during iteration\")\n self._gen=itergen(deq.state,giveup)\n \n def next(self):\n res=self._gen.next()\n self.counter -=1\n return res\n \n def __iter__(self):\n return self\n \nclass defaultdict(dict):\n\n def __init__(self,*args,**kwds):\n if len(args)>0:\n default_factory=args[0]\n args=args[1:]\n if not callable(default_factory)and default_factory is not None :\n raise TypeError(\"first argument must be callable\")\n else :\n default_factory=None\n dict.__init__(self,args,kwds)\n self.default_factory=default_factory\n self.update(args,kwds)\n super(defaultdict,self).__init__(*args,**kwds)\n \n \n \n \n \n \n \n \n def __missing__(self,key):\n \n if self.default_factory is None :\n raise KeyError(key)\n self[key]=value=self.default_factory()\n return value\n \n def __repr__(self,recurse=set()):\n if id(self)in recurse:\n return\"defaultdict(...)\"\n try :\n recurse.add(id(self))\n return\"defaultdict(%s, %s)\"%(repr(self.default_factory),super(defaultdict,self).__repr__())\n finally :\n recurse.remove(id(self))\n \n def copy(self):\n return type(self)(self.default_factory,self)\n \n def __copy__(self):\n return self.copy()\n \n def __reduce__(self):\n \n \n \n \n \n \n \n \n \n \n \n return (type(self),(self.default_factory,),None ,None ,self.items())\n \nfrom operator import itemgetter as _itemgetter\nfrom keyword import iskeyword as _iskeyword\nimport sys as _sys\n\ndef namedtuple(typename,field_names,verbose=False ,rename=False ):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n \n \n if isinstance(field_names,str):\n field_names=field_names.replace(',',' ').split()\n field_names=tuple(map(str,field_names))\n if rename:\n names=list(field_names)\n seen=set()\n for i,name in enumerate(names):\n if (not min(c.isalnum()or c =='_'for c in name)or _iskeyword(name)\n or not name or name[0].isdigit()or name.startswith('_')\n or name in seen):\n names[i]='_%d'%i\n seen.add(name)\n field_names=tuple(names)\n for name in (typename,)+field_names:\n if not min(c.isalnum()or c =='_'for c in name):\n raise ValueError('Type names and field names can only contain alphanumeric characters and underscores: %r'%name)\n if _iskeyword(name):\n raise ValueError('Type names and field names cannot be a keyword: %r'%name)\n if name[0].isdigit():\n raise ValueError('Type names and field names cannot start with a number: %r'%name)\n seen_names=set()\n for name in field_names:\n if name.startswith('_')and not rename:\n raise ValueError('Field names cannot start with an underscore: %r'%name)\n if name in seen_names:\n raise ValueError('Encountered duplicate field name: %r'%name)\n seen_names.add(name)\n \n \n numfields=len(field_names)\n argtxt=repr(field_names).replace(\"'\",\"\")[1:-1]\n reprtxt=', '.join('%s=%%r'%name for name in field_names)\n \n template='''class %(typename)s(tuple):\n '%(typename)s(%(argtxt)s)' \\n\n __slots__ = () \\n\n _fields = %(field_names)r \\n\n def __new__(_cls, %(argtxt)s):\n return tuple.__new__(_cls, (%(argtxt)s)) \\n\n @classmethod\n def _make(cls, iterable, new=tuple.__new__, len=len):\n 'Make a new %(typename)s object from a sequence or iterable'\n result = new(cls, iterable)\n if len(result) != %(numfields)d:\n raise TypeError('Expected %(numfields)d arguments, got %%d' %% len(result))\n return result \\n\n def 
__repr__(self):\n return '%(typename)s(%(reprtxt)s)' %% self \\n\n def _asdict(self):\n 'Return a new dict which maps field names to their values'\n return dict(zip(self._fields, self)) \\n\n def _replace(_self, **kwds):\n 'Return a new %(typename)s object replacing specified fields with new values'\n result = _self._make(map(kwds.pop, %(field_names)r, _self))\n if kwds:\n raise ValueError('Got unexpected field names: %%r' %% kwds.keys())\n return result \\n\n def __getnewargs__(self):\n return tuple(self) \\n\\n'''%locals()\n for i,name in enumerate(field_names):\n template +=' %s = _property(_itemgetter(%d))\\n'%(name,i)\n \n if verbose:\n print(template)\n \n \n namespace=dict(_itemgetter=_itemgetter,__name__='namedtuple_%s'%typename,\n _property=property,_tuple=tuple)\n try :\n exec(template,namespace)\n except SyntaxError as e:\n raise SyntaxError(e.message+':\\n'+template)\n result=namespace[typename]\n \n \n \n \n \n try :\n result.__module__=_sys._getframe(1).f_globals.get('__name__','__main__')\n except (AttributeError,ValueError):\n pass\n \n return result\n \nif __name__ =='__main__':\n Point=namedtuple('Point',['x','y'])\n p=Point(11,y=22)\n print(p[0]+p[1])\n x,y=p\n print(x,y)\n print(p.x+p.y)\n print(p)\n"], "logging": [".py", "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\"\"\"\nLogging package for Python. Based on PEP 282 and comments thereto in\ncomp.lang.python.\n\nCopyright (C) 2001-2013 Vinay Sajip. All Rights Reserved.\n\nTo use, simply 'import logging' and log away!\n\"\"\"\n\nimport sys,os,time,io,traceback,warnings,weakref\nfrom string import Template\nfrom browser import console\n\n__all__=['BASIC_FORMAT','BufferingFormatter','CRITICAL','DEBUG','ERROR',\n'FATAL','FileHandler','Filter','Formatter','Handler','INFO',\n'LogRecord','Logger','LoggerAdapter','NOTSET','NullHandler',\n'StreamHandler','ConsoleHandler','WARN','WARNING','addLevelName','basicConfig',\n'captureWarnings','critical','debug','disable','error',\n'exception','fatal','getLevelName','getLogger','getLoggerClass',\n'info','log','makeLogRecord','setLoggerClass','warn','warning',\n'getLogRecordFactory','setLogRecordFactory','lastResort']\n\ntry :\n import threading\nexcept ImportError:\n threading=None\n \n__author__=\"Vinay Sajip \"\n__status__=\"production\"\n__version__=\"0.5.1.2\"\n__date__=\"07 February 2010\"\n\n\n\n\n\n\n\n\n\nif hasattr(sys,'frozen'):\n _srcfile=\"logging%s__init__%s\"%(os.sep,__file__[-4:])\nelse :\n _srcfile=__file__\n_srcfile=os.path.normcase(_srcfile)\n\n\nif hasattr(sys,'_getframe'):\n currentframe=lambda :sys._getframe(3)\nelse :\n def currentframe():\n ''\n try :\n raise Exception\n except :\n return sys.exc_info()[2].tb_frame.f_back\n \n \n \n \n \n \n \n \n \n \n \n_startTime=time.time()\n\n\n\n\n\nraiseExceptions=True\n\n\n\n\nlogThreads=True\n\n\n\n\nlogMultiprocessing=True\n\n\n\n\nlogProcesses=True\n\n\n\n\n\n\n\n\n\n\n\n\nCRITICAL=50\nFATAL=CRITICAL\nERROR=40\nWARNING=30\nWARN=WARNING\nINFO=20\nDEBUG=10\nNOTSET=0\n\n_levelNames={\nCRITICAL:'CRITICAL',\nERROR:'ERROR',\nWARNING:'WARNING',\nINFO:'INFO',\nDEBUG:'DEBUG',\nNOTSET:'NOTSET',\n'CRITICAL':CRITICAL,\n'ERROR':ERROR,\n'WARN':WARNING,\n'WARNING':WARNING,\n'INFO':INFO,\n'DEBUG':DEBUG,\n'NOTSET':NOTSET,\n}\n\ndef getLevelName(level):\n ''\n\n\n\n\n\n\n\n\n\n\n\n \n return _levelNames.get(level,(\"Level %s\"%level))\n \ndef addLevelName(level,levelName):\n ''\n\n\n\n \n _acquireLock()\n try :\n _levelNames[level]=levelName\n _levelNames[levelName]=level\n finally :\n _releaseLock()\n \ndef _checkLevel(level):\n if 
isinstance(level,int):\n rv=level\n elif str(level)==level:\n if level not in _levelNames:\n raise ValueError(\"Unknown level: %r\"%level)\n rv=_levelNames[level]\n else :\n raise TypeError(\"Level not an integer or a valid string: %r\"%level)\n return rv\n \n \n \n \n \n \n \n \n \n \n \n \n \nif threading:\n _lock=threading.RLock()\nelse :\n _lock=None\n \n \ndef _acquireLock():\n ''\n\n\n\n \n if _lock:\n _lock.acquire()\n \ndef _releaseLock():\n ''\n\n \n if _lock:\n _lock.release()\n \n \n \n \n \nclass LogRecord(object):\n ''\n\n\n\n\n\n\n\n\n\n \n def __init__(self,name,level,pathname,lineno,\n msg,args,exc_info,func=None ,sinfo=None ,**kwargs):\n ''\n\n \n ct=time.time()\n self.name=name\n self.msg=msg\n \n \n \n \n \n \n \n \n \n \n \n \n \n if args and len(args)==1 and isinstance(args[0],dict)and args[0]:\n args=args[0]\n self.args=args\n self.levelname=getLevelName(level)\n self.levelno=level\n self.pathname=pathname\n try :\n self.filename=os.path.basename(pathname)\n self.module=os.path.splitext(self.filename)[0]\n except (TypeError,ValueError,AttributeError):\n self.filename=pathname\n self.module=\"Unknown module\"\n self.exc_info=exc_info\n self.exc_text=None\n self.stack_info=sinfo\n self.lineno=lineno\n self.funcName=func\n self.created=ct\n self.msecs=(ct -int(ct))*1000\n self.relativeCreated=(self.created -_startTime)*1000\n if logThreads and threading:\n self.thread=threading.get_ident()\n self.threadName=threading.current_thread().name\n else :\n self.thread=None\n self.threadName=None\n if not logMultiprocessing:\n self.processName=None\n else :\n self.processName='MainProcess'\n mp=sys.modules.get('multiprocessing')\n if mp is not None :\n \n \n \n \n try :\n self.processName=mp.current_process().name\n except Exception:\n pass\n if logProcesses and hasattr(os,'getpid'):\n self.process=os.getpid()\n else :\n self.process=None\n \n def __str__(self):\n return''%(self.name,self.levelno,\n self.pathname,self.lineno,self.msg)\n \n def getMessage(self):\n ''\n\n\n\n\n \n msg=str(self.msg)\n if self.args:\n msg=msg %self.args\n return msg\n \n \n \n \n_logRecordFactory=LogRecord\n\ndef setLogRecordFactory(factory):\n ''\n\n\n\n\n \n global _logRecordFactory\n _logRecordFactory=factory\n \ndef getLogRecordFactory():\n ''\n\n \n \n return _logRecordFactory\n \ndef makeLogRecord(dict):\n ''\n\n\n\n\n \n rv=_logRecordFactory(None ,None ,\"\",0,\"\",(),None ,None )\n rv.__dict__.update(dict)\n return rv\n \n \n \n \n \nclass PercentStyle(object):\n\n default_format='%(message)s'\n asctime_format='%(asctime)s'\n asctime_search='%(asctime)'\n \n def __init__(self,fmt):\n self._fmt=fmt or self.default_format\n \n def usesTime(self):\n return self._fmt.find(self.asctime_search)>=0\n \n def format(self,record):\n return self._fmt %record.__dict__\n \nclass StrFormatStyle(PercentStyle):\n default_format='{message}'\n asctime_format='{asctime}'\n asctime_search='{asctime'\n \n def format(self,record):\n return self._fmt.format(**record.__dict__)\n \n \nclass StringTemplateStyle(PercentStyle):\n default_format='${message}'\n asctime_format='${asctime}'\n asctime_search='${asctime}'\n \n def __init__(self,fmt):\n self._fmt=fmt or self.default_format\n self._tpl=Template(self._fmt)\n \n def usesTime(self):\n fmt=self._fmt\n return fmt.find('$asctime')>=0 or fmt.find(self.asctime_format)>=0\n \n def format(self,record):\n return self._tpl.substitute(**record.__dict__)\n \n_STYLES={\n'%':PercentStyle,\n'{':StrFormatStyle,\n'$':StringTemplateStyle\n}\n\nclass Formatter(object):\n 
''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n converter=time.localtime\n \n def __init__(self,fmt=None ,datefmt=None ,style='%'):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n \n if style not in _STYLES:\n raise ValueError('Style must be one of: %s'%','.join(\n _STYLES.keys()))\n self._style=_STYLES[style](fmt)\n self._fmt=self._style._fmt\n self.datefmt=datefmt\n \n default_time_format='%Y-%m-%d %H:%M:%S'\n default_msec_format='%s,%03d'\n \n def formatTime(self,record,datefmt=None ):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n ct=self.converter(record.created)\n if datefmt:\n s=time.strftime(datefmt,ct)\n else :\n t=time.strftime(self.default_time_format,ct)\n s=self.default_msec_format %(t,record.msecs)\n return s\n \n def formatException(self,ei):\n ''\n\n\n\n\n \n sio=io.StringIO()\n tb=ei[2]\n \n \n \n traceback.print_exc(file=sio)\n s=sio.getvalue()\n sio.close()\n if s[-1:]==\"\\n\":\n s=s[:-1]\n return s\n \n def usesTime(self):\n ''\n\n \n return self._style.usesTime()\n \n def formatMessage(self,record):\n return self._style.format(record)\n \n def formatStack(self,stack_info):\n ''\n\n\n\n\n\n\n\n\n \n return stack_info\n \n def format(self,record):\n ''\n\n\n\n\n\n\n\n\n\n\n \n record.message=record.getMessage()\n if self.usesTime():\n record.asctime=self.formatTime(record,self.datefmt)\n s=self.formatMessage(record)\n if record.exc_info:\n \n \n if not record.exc_text:\n record.exc_text=self.formatException(record.exc_info)\n if record.exc_text:\n if s[-1:]!=\"\\n\":\n s=s+\"\\n\"\n s=s+record.exc_text\n if record.stack_info:\n if s[-1:]!=\"\\n\":\n s=s+\"\\n\"\n s=s+self.formatStack(record.stack_info)\n return s\n \n \n \n \n_defaultFormatter=Formatter()\n\nclass BufferingFormatter(object):\n ''\n\n \n def __init__(self,linefmt=None ):\n ''\n\n\n \n if linefmt:\n self.linefmt=linefmt\n else :\n self.linefmt=_defaultFormatter\n \n def formatHeader(self,records):\n ''\n\n \n return\"\"\n \n def formatFooter(self,records):\n ''\n\n \n return\"\"\n \n def format(self,records):\n ''\n\n \n rv=\"\"\n if len(records)>0:\n rv=rv+self.formatHeader(records)\n for record in records:\n rv=rv+self.linefmt.format(record)\n rv=rv+self.formatFooter(records)\n return rv\n \n \n \n \n \nclass Filter(object):\n ''\n\n\n\n\n\n\n\n\n \n def __init__(self,name=''):\n ''\n\n\n\n\n\n \n self.name=name\n self.nlen=len(name)\n \n def filter(self,record):\n ''\n\n\n\n\n \n if self.nlen ==0:\n return True\n elif self.name ==record.name:\n return True\n elif record.name.find(self.name,0,self.nlen)!=0:\n return False\n return (record.name[self.nlen]==\".\")\n \nclass Filterer(object):\n ''\n\n\n \n def __init__(self):\n ''\n\n \n self.filters=[]\n \n def addFilter(self,filter):\n ''\n\n \n if not (filter in self.filters):\n self.filters.append(filter)\n \n def removeFilter(self,filter):\n ''\n\n \n if filter in self.filters:\n self.filters.remove(filter)\n \n def filter(self,record):\n ''\n\n\n\n\n\n\n\n\n\n \n rv=True\n for f in self.filters:\n if hasattr(f,'filter'):\n result=f.filter(record)\n else :\n result=f(record)\n if not result:\n rv=False\n break\n return rv\n \n \n \n \n \n_handlers=weakref.WeakValueDictionary()\n_handlerList=[]\n\ndef _removeHandlerRef(wr):\n ''\n\n \n \n \n \n if (_acquireLock is not None and _handlerList is not None and\n _releaseLock is not None ):\n _acquireLock()\n try :\n if wr in _handlerList:\n _handlerList.remove(wr)\n finally :\n _releaseLock()\n \ndef _addHandlerRef(handler):\n ''\n\n \n _acquireLock()\n try :\n 
_handlerList.append(weakref.ref(handler,_removeHandlerRef))\n finally :\n _releaseLock()\n \nclass Handler(Filterer):\n ''\n\n\n\n\n\n\n \n def __init__(self,level=NOTSET):\n ''\n\n\n \n Filterer.__init__(self)\n self._name=None\n self.level=_checkLevel(level)\n self.formatter=None\n \n _addHandlerRef(self)\n self.createLock()\n \n def get_name(self):\n return self._name\n \n def set_name(self,name):\n _acquireLock()\n try :\n if self._name in _handlers:\n del _handlers[self._name]\n self._name=name\n if name:\n _handlers[name]=self\n finally :\n _releaseLock()\n \n name=property(get_name,set_name)\n \n def createLock(self):\n ''\n\n \n if threading:\n self.lock=threading.RLock()\n else :\n self.lock=None\n \n def acquire(self):\n ''\n\n \n if self.lock:\n self.lock.acquire()\n \n def release(self):\n ''\n\n \n if self.lock:\n self.lock.release()\n \n def setLevel(self,level):\n ''\n\n \n self.level=_checkLevel(level)\n \n def format(self,record):\n ''\n\n\n\n\n \n if self.formatter:\n fmt=self.formatter\n else :\n fmt=_defaultFormatter\n return fmt.format(record)\n \n def emit(self,record):\n ''\n\n\n\n\n \n raise NotImplementedError('emit must be implemented '\n 'by Handler subclasses')\n \n def handle(self,record):\n ''\n\n\n\n\n\n\n \n rv=self.filter(record)\n if rv:\n self.acquire()\n try :\n self.emit(record)\n finally :\n self.release()\n return rv\n \n def setFormatter(self,fmt):\n ''\n\n \n self.formatter=fmt\n \n def flush(self):\n ''\n\n\n\n\n \n pass\n \n def close(self):\n ''\n\n\n\n\n\n\n \n \n _acquireLock()\n try :\n if self._name and self._name in _handlers:\n del _handlers[self._name]\n finally :\n _releaseLock()\n \n def handleError(self,record):\n ''\n\n\n\n\n\n\n\n\n\n \n if raiseExceptions and sys.stderr:\n try :\n traceback.print_exc(file=sys.stderr)\n sys.stderr.write('Logged from file %s, line %s\\n'%(\n record.filename,record.lineno))\n except IOError:\n pass\n \nclass StreamHandler(Handler):\n ''\n\n\n\n \n \n terminator='\\n'\n \n def __init__(self,stream=None ):\n ''\n\n\n\n \n Handler.__init__(self)\n if stream is None :\n stream=sys.stderr\n self.stream=stream\n \n def flush(self):\n ''\n\n \n self.acquire()\n try :\n if self.stream and hasattr(self.stream,\"flush\"):\n self.stream.flush()\n finally :\n self.release()\n \n def emit(self,record):\n ''\n\n\n\n\n\n\n\n\n \n try :\n msg=self.format(record)\n stream=self.stream\n stream.write(msg)\n stream.write(self.terminator)\n self.flush()\n except (KeyboardInterrupt,SystemExit):\n raise\n except :\n self.handleError(record)\n \nclass FileHandler(StreamHandler):\n ''\n\n \n def __init__(self,filename,mode='a',encoding=None ,delay=False ):\n ''\n\n \n \n \n self.baseFilename=os.path.abspath(filename)\n self.mode=mode\n self.encoding=encoding\n self.delay=delay\n if delay:\n \n \n Handler.__init__(self)\n self.stream=None\n else :\n StreamHandler.__init__(self,self._open())\n \n def close(self):\n ''\n\n \n self.acquire()\n try :\n if self.stream:\n self.flush()\n if hasattr(self.stream,\"close\"):\n self.stream.close()\n StreamHandler.close(self)\n self.stream=None\n finally :\n self.release()\n \n def _open(self):\n ''\n\n\n \n return open(self.baseFilename,self.mode,encoding=self.encoding)\n \n def emit(self,record):\n ''\n\n\n\n\n \n if self.stream is None :\n self.stream=self._open()\n StreamHandler.emit(self,record)\n \nclass _StderrHandler(StreamHandler):\n ''\n\n\n\n \n def __init__(self,level=NOTSET):\n ''\n\n \n Handler.__init__(self,level)\n \n @property\n def stream(self):\n return sys.stderr\n \n 
\nclass ConsoleHandler(Handler):\n ''\n\n\n \n \n def emit(self,record):\n ''\n\n\n\n\n\n\n \n try :\n msg=self.format(record)\n console.log(msg)\n except :\n self.handleError(record)\n \n_defaultLastResort=ConsoleHandler(WARNING)\nlastResort=_defaultLastResort\n\n\n\n\n\nclass PlaceHolder(object):\n ''\n\n\n\n \n def __init__(self,alogger):\n ''\n\n \n self.loggerMap={alogger:None }\n \n def append(self,alogger):\n ''\n\n \n if alogger not in self.loggerMap:\n self.loggerMap[alogger]=None\n \n \n \n \n_loggerClass=None\n\ndef setLoggerClass(klass):\n ''\n\n\n\n \n if klass !=Logger:\n if not issubclass(klass,Logger):\n raise TypeError(\"logger not derived from logging.Logger: \"\n +klass.__name__)\n global _loggerClass\n _loggerClass=klass\n \ndef getLoggerClass():\n ''\n\n \n \n return _loggerClass\n \nclass Manager(object):\n ''\n\n\n \n def __init__(self,rootnode):\n ''\n\n \n self.root=rootnode\n self.disable=0\n self.emittedNoHandlerWarning=False\n self.loggerDict={}\n self.loggerClass=None\n self.logRecordFactory=None\n \n def getLogger(self,name):\n ''\n\n\n\n\n\n\n\n\n \n rv=None\n if not isinstance(name,str):\n raise TypeError('A logger name must be a string')\n _acquireLock()\n try :\n if name in self.loggerDict:\n rv=self.loggerDict[name]\n if isinstance(rv,PlaceHolder):\n ph=rv\n rv=(self.loggerClass or _loggerClass)(name)\n rv.manager=self\n self.loggerDict[name]=rv\n self._fixupChildren(ph,rv)\n self._fixupParents(rv)\n else :\n rv=(self.loggerClass or _loggerClass)(name)\n rv.manager=self\n self.loggerDict[name]=rv\n self._fixupParents(rv)\n finally :\n _releaseLock()\n return rv\n \n def setLoggerClass(self,klass):\n ''\n\n \n if klass !=Logger:\n if not issubclass(klass,Logger):\n raise TypeError(\"logger not derived from logging.Logger: \"\n +klass.__name__)\n self.loggerClass=klass\n \n def setLogRecordFactory(self,factory):\n ''\n\n\n \n self.logRecordFactory=factory\n \n def _fixupParents(self,alogger):\n ''\n\n\n \n name=alogger.name\n i=name.rfind(\".\")\n rv=None\n while (i >0)and not rv:\n substr=name[:i]\n if substr not in self.loggerDict:\n self.loggerDict[substr]=PlaceHolder(alogger)\n else :\n obj=self.loggerDict[substr]\n if isinstance(obj,Logger):\n rv=obj\n else :\n assert isinstance(obj,PlaceHolder)\n obj.append(alogger)\n i=name.rfind(\".\",0,i -1)\n if not rv:\n rv=self.root\n alogger.parent=rv\n \n def _fixupChildren(self,ph,alogger):\n ''\n\n\n \n name=alogger.name\n namelen=len(name)\n for c in ph.loggerMap.keys():\n \n if c.parent.name[:namelen]!=name:\n alogger.parent=c.parent\n c.parent=alogger\n \n \n \n \n \nclass Logger(Filterer):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n \n def __init__(self,name,level=NOTSET):\n ''\n\n \n Filterer.__init__(self)\n self.name=name\n self.level=_checkLevel(level)\n self.parent=None\n self.propagate=True\n self.handlers=[]\n self.disabled=False\n \n def setLevel(self,level):\n ''\n\n \n self.level=_checkLevel(level)\n \n def debug(self,msg,*args,**kwargs):\n ''\n\n\n\n\n\n\n \n if self.isEnabledFor(DEBUG):\n self._log(DEBUG,msg,args,**kwargs)\n \n def info(self,msg,*args,**kwargs):\n ''\n\n\n\n\n\n\n \n if self.isEnabledFor(INFO):\n self._log(INFO,msg,args,**kwargs)\n \n def warning(self,msg,*args,**kwargs):\n ''\n\n\n\n\n\n\n \n if self.isEnabledFor(WARNING):\n self._log(WARNING,msg,args,**kwargs)\n \n def warn(self,msg,*args,**kwargs):\n warnings.warn(\"The 'warn' method is deprecated, \"\n \"use 'warning' instead\",DeprecationWarning,2)\n self.warning(msg,*args,**kwargs)\n \n def error(self,msg,*args,**kwargs):\n 
''\n\n\n\n\n\n\n \n if self.isEnabledFor(ERROR):\n self._log(ERROR,msg,args,**kwargs)\n \n def exception(self,msg,*args,**kwargs):\n ''\n\n \n kwargs['exc_info']=True\n self.error(msg,*args,**kwargs)\n \n def critical(self,msg,*args,**kwargs):\n ''\n\n\n\n\n\n\n \n if self.isEnabledFor(CRITICAL):\n self._log(CRITICAL,msg,args,**kwargs)\n \n fatal=critical\n \n def log(self,level,msg,*args,**kwargs):\n ''\n\n\n\n\n\n\n \n if not isinstance(level,int):\n if raiseExceptions:\n raise TypeError(\"level must be an integer\")\n else :\n return\n if self.isEnabledFor(level):\n self._log(level,msg,args,**kwargs)\n \n def findCaller(self,stack_info=False ):\n ''\n\n\n \n f=currentframe()\n \n \n if f is not None :\n f=f.f_back\n rv=\"(unknown file)\",0,\"(unknown function)\",None\n while hasattr(f,\"f_code\"):\n co=f.f_code\n filename=os.path.normcase(co.co_filename)\n if filename ==_srcfile:\n f=f.f_back\n continue\n sinfo=None\n if stack_info:\n sio=io.StringIO()\n sio.write('Stack (most recent call last):\\n')\n traceback.print_stack(f,file=sio)\n sinfo=sio.getvalue()\n if sinfo[-1]=='\\n':\n sinfo=sinfo[:-1]\n sio.close()\n rv=(co.co_filename,f.f_lineno,co.co_name,sinfo)\n break\n return rv\n \n def makeRecord(self,name,level,fn,lno,msg,args,exc_info,\n func=None ,extra=None ,sinfo=None ):\n ''\n\n\n \n rv=_logRecordFactory(name,level,fn,lno,msg,args,exc_info,func,\n sinfo)\n if extra is not None :\n for key in extra:\n if (key in [\"message\",\"asctime\"])or (key in rv.__dict__):\n raise KeyError(\"Attempt to overwrite %r in LogRecord\"%key)\n rv.__dict__[key]=extra[key]\n return rv\n \n def _log(self,level,msg,args,exc_info=None ,extra=None ,stack_info=False ):\n ''\n\n\n \n sinfo=None\n if _srcfile:\n \n \n \n try :\n fn,lno,func,sinfo=self.findCaller(stack_info)\n except ValueError:\n fn,lno,func=\"(unknown file)\",0,\"(unknown function)\"\n else :\n fn,lno,func=\"(unknown file)\",0,\"(unknown function)\"\n if exc_info:\n if not isinstance(exc_info,tuple):\n exc_info=sys.exc_info()\n record=self.makeRecord(self.name,level,fn,lno,msg,args,\n exc_info,func,extra,sinfo)\n self.handle(record)\n \n def handle(self,record):\n ''\n\n\n\n\n \n if (not self.disabled)and self.filter(record):\n self.callHandlers(record)\n \n def addHandler(self,hdlr):\n ''\n\n \n _acquireLock()\n try :\n if not (hdlr in self.handlers):\n self.handlers.append(hdlr)\n finally :\n _releaseLock()\n \n def removeHandler(self,hdlr):\n ''\n\n \n _acquireLock()\n try :\n if hdlr in self.handlers:\n self.handlers.remove(hdlr)\n finally :\n _releaseLock()\n \n def hasHandlers(self):\n ''\n\n\n\n\n\n\n\n \n c=self\n rv=False\n while c:\n if c.handlers:\n rv=True\n break\n if not c.propagate:\n break\n else :\n c=c.parent\n return rv\n \n def callHandlers(self,record):\n ''\n\n\n\n\n\n\n\n \n c=self\n found=0\n while c:\n for hdlr in c.handlers:\n found=found+1\n if record.levelno >=hdlr.level:\n hdlr.handle(record)\n if not c.propagate:\n c=None\n else :\n c=c.parent\n if (found ==0):\n if lastResort:\n if record.levelno >=lastResort.level:\n lastResort.handle(record)\n elif raiseExceptions and not self.manager.emittedNoHandlerWarning:\n sys.stderr.write(\"No handlers could be found for logger\"\n \" \\\"%s\\\"\\n\"%self.name)\n self.manager.emittedNoHandlerWarning=True\n \n def getEffectiveLevel(self):\n ''\n\n\n\n\n \n logger=self\n while logger:\n if logger.level:\n return logger.level\n logger=logger.parent\n return NOTSET\n \n def isEnabledFor(self,level):\n ''\n\n \n if self.manager.disable >=level:\n return False\n return 
level >=self.getEffectiveLevel()\n \n def getChild(self,suffix):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n \n if self.root is not self:\n suffix='.'.join((self.name,suffix))\n return self.manager.getLogger(suffix)\n \nclass RootLogger(Logger):\n ''\n\n\n\n \n def __init__(self,level):\n ''\n\n \n Logger.__init__(self,\"root\",level)\n \n_loggerClass=Logger\n\nclass LoggerAdapter(object):\n ''\n\n\n \n \n def __init__(self,logger,extra):\n ''\n\n\n\n\n\n\n\n\n \n self.logger=logger\n self.extra=extra\n \n def process(self,msg,kwargs):\n ''\n\n\n\n\n\n\n\n \n kwargs[\"extra\"]=self.extra\n return msg,kwargs\n \n \n \n \n def debug(self,msg,*args,**kwargs):\n ''\n\n \n self.log(DEBUG,msg,*args,**kwargs)\n \n def info(self,msg,*args,**kwargs):\n ''\n\n \n self.log(INFO,msg,*args,**kwargs)\n \n def warning(self,msg,*args,**kwargs):\n ''\n\n \n self.log(WARNING,msg,*args,**kwargs)\n \n def warn(self,msg,*args,**kwargs):\n warnings.warn(\"The 'warn' method is deprecated, \"\n \"use 'warning' instead\",DeprecationWarning,2)\n self.warning(msg,*args,**kwargs)\n \n def error(self,msg,*args,**kwargs):\n ''\n\n \n self.log(ERROR,msg,*args,**kwargs)\n \n def exception(self,msg,*args,**kwargs):\n ''\n\n \n kwargs[\"exc_info\"]=True\n self.log(ERROR,msg,*args,**kwargs)\n \n def critical(self,msg,*args,**kwargs):\n ''\n\n \n self.log(CRITICAL,msg,*args,**kwargs)\n \n def log(self,level,msg,*args,**kwargs):\n ''\n\n\n \n if self.isEnabledFor(level):\n msg,kwargs=self.process(msg,kwargs)\n self.logger._log(level,msg,args,**kwargs)\n \n def isEnabledFor(self,level):\n ''\n\n \n if self.logger.manager.disable >=level:\n return False\n return level >=self.getEffectiveLevel()\n \n def setLevel(self,level):\n ''\n\n \n self.logger.setLevel(level)\n \n def getEffectiveLevel(self):\n ''\n\n \n return self.logger.getEffectiveLevel()\n \n def hasHandlers(self):\n ''\n\n \n return self.logger.hasHandlers()\n \nroot=RootLogger(WARNING)\nLogger.root=root\nLogger.manager=Manager(Logger.root)\n\n\n\n\n\nBASIC_FORMAT=\"%(levelname)s:%(name)s:%(message)s\"\n\ndef basicConfig(**kwargs):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n \n _acquireLock()\n try :\n if len(root.handlers)==0:\n handlers=kwargs.get(\"handlers\")\n if handlers is None :\n if\"stream\"in kwargs and\"filename\"in kwargs:\n raise ValueError(\"'stream' and 'filename' should not be \"\n \"specified together\")\n else :\n if\"stream\"in kwargs or\"filename\"in kwargs:\n raise ValueError(\"'stream' or 'filename' should not be \"\n \"specified together with 'handlers'\")\n if handlers is None :\n filename=kwargs.get(\"filename\")\n if filename:\n mode=kwargs.get(\"filemode\",'a')\n h=FileHandler(filename,mode)\n else :\n stream=kwargs.get(\"stream\")\n if stream:\n h=StreamHandler(stream)\n else :\n h=ConsoleHandler()\n handlers=[h]\n fs=kwargs.get(\"format\",BASIC_FORMAT)\n dfs=kwargs.get(\"datefmt\",None )\n style=kwargs.get(\"style\",'%')\n fmt=Formatter(fs,dfs,style)\n for h in handlers:\n if h.formatter is None :\n h.setFormatter(fmt)\n root.addHandler(h)\n level=kwargs.get(\"level\")\n if level is not None :\n root.setLevel(level)\n finally :\n _releaseLock()\n \n \n \n \n \n \ndef getLogger(name=None ):\n ''\n\n\n\n \n if name:\n return Logger.manager.getLogger(name)\n else :\n return root\n \ndef critical(msg,*args,**kwargs):\n ''\n\n\n\n \n if len(root.handlers)==0:\n basicConfig()\n root.critical(msg,*args,**kwargs)\n \nfatal=critical\n\ndef error(msg,*args,**kwargs):\n ''\n\n\n\n \n if 
len(root.handlers)==0:\n basicConfig()\n root.error(msg,*args,**kwargs)\n \ndef exception(msg,*args,**kwargs):\n ''\n\n\n\n \n kwargs['exc_info']=True\n error(msg,*args,**kwargs)\n \ndef warning(msg,*args,**kwargs):\n ''\n\n\n\n \n if len(root.handlers)==0:\n basicConfig()\n root.warning(msg,*args,**kwargs)\n \ndef warn(msg,*args,**kwargs):\n warnings.warn(\"The 'warn' function is deprecated, \"\n \"use 'warning' instead\",DeprecationWarning,2)\n warning(msg,*args,**kwargs)\n \ndef info(msg,*args,**kwargs):\n ''\n\n\n\n \n if len(root.handlers)==0:\n basicConfig()\n root.info(msg,*args,**kwargs)\n \ndef debug(msg,*args,**kwargs):\n ''\n\n\n\n \n if len(root.handlers)==0:\n basicConfig()\n root.debug(msg,*args,**kwargs)\n \ndef log(level,msg,*args,**kwargs):\n ''\n\n\n\n \n if len(root.handlers)==0:\n basicConfig()\n root.log(level,msg,*args,**kwargs)\n \ndef disable(level):\n ''\n\n \n root.manager.disable=level\n \ndef shutdown(handlerList=_handlerList):\n ''\n\n\n\n\n \n for wr in reversed(handlerList[:]):\n \n \n try :\n h=wr()\n if h:\n try :\n h.acquire()\n h.flush()\n h.close()\n except (IOError,ValueError):\n \n \n \n \n pass\n finally :\n h.release()\n except :\n if raiseExceptions:\n raise\n \n \n \nimport atexit\natexit.register(shutdown)\n\n\n\nclass NullHandler(Handler):\n ''\n\n\n\n\n\n\n\n \n def handle(self,record):\n ''\n \n def emit(self,record):\n ''\n \n def createLock(self):\n self.lock=None\n \n \n \n_warnings_showwarning=None\n\ndef _showwarning(message,category,filename,lineno,file=None ,line=None ):\n ''\n\n\n\n\n\n \n if file is not None :\n if _warnings_showwarning is not None :\n _warnings_showwarning(message,category,filename,lineno,file,line)\n else :\n s=warnings.formatwarning(message,category,filename,lineno,line)\n logger=getLogger(\"py.warnings\")\n if not logger.handlers:\n logger.addHandler(NullHandler())\n logger.warning(\"%s\",s)\n \ndef captureWarnings(capture):\n ''\n\n\n\n \n global _warnings_showwarning\n if capture:\n if _warnings_showwarning is None :\n _warnings_showwarning=warnings.showwarning\n warnings.showwarning=_showwarning\n else :\n if _warnings_showwarning is not None :\n warnings.showwarning=_warnings_showwarning\n _warnings_showwarning=None\n", 1], "xml.etree.ElementPath": [".py", "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\nimport re\n\nxpath_tokenizer_re=re.compile(\n\"(\"\n\"'[^']*'|\\\"[^\\\"]*\\\"|\"\n\"::|\"\n\"//?|\"\n\"\\.\\.|\"\n\"\\(\\)|\"\n\"[/.*:\\[\\]\\(\\)@=])|\"\n\"((?:\\{[^}]+\\})?[^/\\[\\]\\(\\)@=\\s]+)|\"\n\"\\s+\"\n)\n\ndef xpath_tokenizer(pattern,namespaces=None ):\n for token in xpath_tokenizer_re.findall(pattern):\n tag=token[1]\n if tag and tag[0]!=\"{\"and\":\"in tag:\n try :\n prefix,uri=tag.split(\":\",1)\n if not namespaces:\n raise KeyError\n yield token[0],\"{%s}%s\"%(namespaces[prefix],uri)\n except KeyError:\n raise SyntaxError(\"prefix %r not found in prefix map\"%prefix)\n else :\n yield token\n \ndef get_parent_map(context):\n parent_map=context.parent_map\n if parent_map is None :\n context.parent_map=parent_map={}\n for p in context.root.iter():\n for e in p:\n parent_map[e]=p\n return parent_map\n \ndef prepare_child(next,token):\n tag=token[1]\n def select(context,result):\n for elem in result:\n for e in elem:\n if e.tag ==tag:\n yield e\n return select\n \ndef prepare_star(next,token):\n def select(context,result):\n for elem in result:\n for e in elem:\n yield e\n return select\n \ndef prepare_self(next,token):\n def 
select(context,result):\n for elem in result:\n yield elem\n return select\n \ndef prepare_descendant(next,token):\n token=next()\n if token[0]==\"*\":\n tag=\"*\"\n elif not token[0]:\n tag=token[1]\n else :\n raise SyntaxError(\"invalid descendant\")\n def select(context,result):\n for elem in result:\n for e in elem.iter(tag):\n if e is not elem:\n yield e\n return select\n \ndef prepare_parent(next,token):\n def select(context,result):\n \n parent_map=get_parent_map(context)\n result_map={}\n for elem in result:\n if elem in parent_map:\n parent=parent_map[elem]\n if parent not in result_map:\n result_map[parent]=None\n yield parent\n return select\n \ndef prepare_predicate(next,token):\n\n\n\n signature=[]\n predicate=[]\n while 1:\n token=next()\n if token[0]==\"]\":\n break\n if token[0]and token[0][:1]in\"'\\\"\":\n token=\"'\",token[0][1:-1]\n signature.append(token[0]or\"-\")\n predicate.append(token[1])\n signature=\"\".join(signature)\n \n if signature ==\"@-\":\n \n key=predicate[1]\n def select(context,result):\n for elem in result:\n if elem.get(key)is not None :\n yield elem\n return select\n if signature ==\"@-='\":\n \n key=predicate[1]\n value=predicate[-1]\n def select(context,result):\n for elem in result:\n if elem.get(key)==value:\n yield elem\n return select\n if signature ==\"-\"and not re.match(\"\\d+$\",predicate[0]):\n \n tag=predicate[0]\n def select(context,result):\n for elem in result:\n if elem.find(tag)is not None :\n yield elem\n return select\n if signature ==\"-='\"and not re.match(\"\\d+$\",predicate[0]):\n \n tag=predicate[0]\n value=predicate[-1]\n def select(context,result):\n for elem in result:\n for e in elem.findall(tag):\n if\"\".join(e.itertext())==value:\n yield elem\n break\n return select\n if signature ==\"-\"or signature ==\"-()\"or signature ==\"-()-\":\n \n if signature ==\"-\":\n index=int(predicate[0])-1\n else :\n if predicate[0]!=\"last\":\n raise SyntaxError(\"unsupported function\")\n if signature ==\"-()-\":\n try :\n index=int(predicate[2])-1\n except ValueError:\n raise SyntaxError(\"unsupported expression\")\n else :\n index=-1\n def select(context,result):\n parent_map=get_parent_map(context)\n for elem in result:\n try :\n parent=parent_map[elem]\n \n elems=list(parent.findall(elem.tag))\n if elems[index]is elem:\n yield elem\n except (IndexError,KeyError):\n pass\n return select\n raise SyntaxError(\"invalid predicate\")\n \nops={\n\"\":prepare_child,\n\"*\":prepare_star,\n\".\":prepare_self,\n\"..\":prepare_parent,\n\"//\":prepare_descendant,\n\"[\":prepare_predicate,\n}\n\n_cache={}\n\nclass _SelectorContext:\n parent_map=None\n def __init__(self,root):\n self.root=root\n \n \n \n \n \n \ndef iterfind(elem,path,namespaces=None ):\n\n if path[-1:]==\"/\":\n path=path+\"*\"\n try :\n selector=_cache[path]\n except KeyError:\n if len(_cache)>100:\n _cache.clear()\n if path[:1]==\"/\":\n raise SyntaxError(\"cannot use absolute path on element\")\n next=iter(xpath_tokenizer(path,namespaces)).__next__\n token=next()\n selector=[]\n while 1:\n try :\n selector.append(ops[token[0]](next,token))\n except StopIteration:\n raise SyntaxError(\"invalid path\")\n try :\n token=next()\n if token[0]==\"/\":\n token=next()\n except StopIteration:\n break\n _cache[path]=selector\n \n result=[elem]\n context=_SelectorContext(elem)\n for select in selector:\n result=select(context,result)\n return result\n \n \n \n \ndef find(elem,path,namespaces=None ):\n try :\n return next(iterfind(elem,path,namespaces))\n except StopIteration:\n return 
None\n \n \n \n \ndef findall(elem,path,namespaces=None ):\n return list(iterfind(elem,path,namespaces))\n \n \n \n \ndef findtext(elem,path,default=None ,namespaces=None ):\n try :\n elem=next(iterfind(elem,path,namespaces))\n return elem.text or\"\"\n except StopIteration:\n return default\n"], "pydoc_data": [".py", "", 1], "gc": [".py", "''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\nDEBUG_COLLECTABLE=2\n\nDEBUG_LEAK=38\n\nDEBUG_SAVEALL=32\n\nDEBUG_STATS=1\n\nDEBUG_UNCOLLECTABLE=4\n\nclass __loader__:\n pass\n \ncallbacks=[]\n\ndef collect(*args,**kw):\n ''\n\n\n\n\n\n \n pass\n \ndef disable(*args,**kw):\n ''\n\n \n pass\n \ndef enable(*args,**kw):\n ''\n\n \n pass\n \ngarbage=[]\n\ndef get_count(*args,**kw):\n ''\n\n \n pass\n \ndef get_debug(*args,**kw):\n ''\n\n \n pass\n \ndef get_objects(*args,**kw):\n ''\n\n\n \n pass\n \ndef get_referents(*args,**kw):\n ''\n pass\n \ndef get_referrers(*args,**kw):\n ''\n pass\n \ndef get_threshold(*args,**kw):\n ''\n\n \n pass\n \ndef is_tracked(*args,**kw):\n ''\n\n\n \n pass\n \ndef isenabled(*args,**kw):\n ''\n\n \n pass\n \ndef set_debug(*args,**kw):\n ''\n\n\n\n\n\n\n\n\n\n\n \n pass\n \ndef set_threshold(*args,**kw):\n ''\n\n\n \n pass\n"], "asyncio.protocols": [".py", "''\n\n__all__=['BaseProtocol','Protocol','DatagramProtocol',\n'SubprocessProtocol']\n\n\nclass BaseProtocol:\n ''\n\n\n\n\n\n\n \n \n def connection_made(self,transport):\n ''\n\n\n\n\n \n \n def connection_lost(self,exc):\n ''\n\n\n\n\n \n \n def pause_writing(self):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n def resume_writing(self):\n ''\n\n\n \n \n \nclass Protocol(BaseProtocol):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n def data_received(self,data):\n ''\n\n\n \n \n def eof_received(self):\n ''\n\n\n\n\n \n \n \nclass DatagramProtocol(BaseProtocol):\n ''\n \n def datagram_received(self,data,addr):\n ''\n \n def error_received(self,exc):\n ''\n\n\n \n \n \nclass SubprocessProtocol(BaseProtocol):\n ''\n \n def pipe_data_received(self,fd,data):\n ''\n\n\n\n \n \n def pipe_connection_lost(self,fd,exc):\n ''\n\n\n\n \n \n def process_exited(self):\n ''\n"], "encodings.cp500": [".py", "''\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_table)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False ):\n return codecs.charmap_encode(input,self.errors,encoding_table)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False ):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='cp500',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n \n 
\ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x9c'\n'\\t'\n'\\x86'\n'\\x7f'\n'\\x97'\n'\\x8d'\n'\\x8e'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x9d'\n'\\x85'\n'\\x08'\n'\\x87'\n'\\x18'\n'\\x19'\n'\\x92'\n'\\x8f'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n'\\x80'\n'\\x81'\n'\\x82'\n'\\x83'\n'\\x84'\n'\\n'\n'\\x17'\n'\\x1b'\n'\\x88'\n'\\x89'\n'\\x8a'\n'\\x8b'\n'\\x8c'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x90'\n'\\x91'\n'\\x16'\n'\\x93'\n'\\x94'\n'\\x95'\n'\\x96'\n'\\x04'\n'\\x98'\n'\\x99'\n'\\x9a'\n'\\x9b'\n'\\x14'\n'\\x15'\n'\\x9e'\n'\\x1a'\n' '\n'\\xa0'\n'\\xe2'\n'\\xe4'\n'\\xe0'\n'\\xe1'\n'\\xe3'\n'\\xe5'\n'\\xe7'\n'\\xf1'\n'['\n'.'\n'<'\n'('\n'+'\n'!'\n'&'\n'\\xe9'\n'\\xea'\n'\\xeb'\n'\\xe8'\n'\\xed'\n'\\xee'\n'\\xef'\n'\\xec'\n'\\xdf'\n']'\n'$'\n'*'\n')'\n';'\n'^'\n'-'\n'/'\n'\\xc2'\n'\\xc4'\n'\\xc0'\n'\\xc1'\n'\\xc3'\n'\\xc5'\n'\\xc7'\n'\\xd1'\n'\\xa6'\n','\n'%'\n'_'\n'>'\n'?'\n'\\xf8'\n'\\xc9'\n'\\xca'\n'\\xcb'\n'\\xc8'\n'\\xcd'\n'\\xce'\n'\\xcf'\n'\\xcc'\n'`'\n':'\n'#'\n'@'\n\"'\"\n'='\n'\"'\n'\\xd8'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'\\xab'\n'\\xbb'\n'\\xf0'\n'\\xfd'\n'\\xfe'\n'\\xb1'\n'\\xb0'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n'\\xaa'\n'\\xba'\n'\\xe6'\n'\\xb8'\n'\\xc6'\n'\\xa4'\n'\\xb5'\n'~'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'\\xa1'\n'\\xbf'\n'\\xd0'\n'\\xdd'\n'\\xde'\n'\\xae'\n'\\xa2'\n'\\xa3'\n'\\xa5'\n'\\xb7'\n'\\xa9'\n'\\xa7'\n'\\xb6'\n'\\xbc'\n'\\xbd'\n'\\xbe'\n'\\xac'\n'|'\n'\\xaf'\n'\\xa8'\n'\\xb4'\n'\\xd7'\n'{'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'\\xad'\n'\\xf4'\n'\\xf6'\n'\\xf2'\n'\\xf3'\n'\\xf5'\n'}'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'\\xb9'\n'\\xfb'\n'\\xfc'\n'\\xf9'\n'\\xfa'\n'\\xff'\n'\\\\'\n'\\xf7'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'\\xb2'\n'\\xd4'\n'\\xd6'\n'\\xd2'\n'\\xd3'\n'\\xd5'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n'\\xb3'\n'\\xdb'\n'\\xdc'\n'\\xd9'\n'\\xda'\n'\\x9f'\n)\n\n\nencoding_table=codecs.charmap_build(decoding_table)\n"], "datetime": [".py", "''\n\n\n\n\n\nimport time as _time\nimport math as _math\n\ndef _cmp(x,y):\n return 0 if x ==y else 1 if x >y else -1\n \nMINYEAR=1\nMAXYEAR=9999\n_MAXORDINAL=3652059\n\n\n\n\n\n\n\n\n\n\n_DAYS_IN_MONTH=[None ,31,28,31,30,31,30,31,31,30,31,30,31]\n\n_DAYS_BEFORE_MONTH=[None ]\ndbm=0\nfor dim in _DAYS_IN_MONTH[1:]:\n _DAYS_BEFORE_MONTH.append(dbm)\n dbm +=dim\ndel dbm,dim\n\ndef _is_leap(year):\n ''\n return year %4 ==0 and (year %100 !=0 or year %400 ==0)\n \ndef _days_before_year(year):\n ''\n y=year -1\n return y *365+y //4 -y //100+y //400\n \ndef _days_in_month(year,month):\n ''\n assert 1 <=month <=12,month\n if month ==2 and _is_leap(year):\n return 29\n return _DAYS_IN_MONTH[month]\n \ndef _days_before_month(year,month):\n ''\n assert 1 <=month <=12,'month must be in 1..12'\n return _DAYS_BEFORE_MONTH[month]+(month >2 and _is_leap(year))\n \ndef _ymd2ord(year,month,day):\n ''\n assert 1 <=month <=12,'month must be in 1..12'\n dim=_days_in_month(year,month)\n assert 1 <=day <=dim,('day must be in 1..%d'%dim)\n return (_days_before_year(year)+\n _days_before_month(year,month)+\n day)\n \n_DI400Y=_days_before_year(401)\n_DI100Y=_days_before_year(101)\n_DI4Y=_days_before_year(5)\n\n\n\nassert _DI4Y ==4 *365+1\n\n\n\nassert _DI400Y ==4 *_DI100Y+1\n\n\n\nassert _DI100Y ==25 *_DI4Y -1\n\ndef _ord2ymd(n):\n ''\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n n -=1\n n400,n=divmod(n,_DI400Y)\n year=n400 *400+1\n \n \n \n \n \n \n n100,n=divmod(n,_DI100Y)\n \n \n n4,n=divmod(n,_DI4Y)\n 
\n \n \n n1,n=divmod(n,365)\n \n year +=n100 *100+n4 *4+n1\n if n1 ==4 or n100 ==4:\n assert n ==0\n return year -1,12,31\n \n \n \n leapyear=n1 ==3 and (n4 !=24 or n100 ==3)\n assert leapyear ==_is_leap(year)\n month=(n+50)>>5\n preceding=_DAYS_BEFORE_MONTH[month]+(month >2 and leapyear)\n if preceding >n:\n month -=1\n preceding -=_DAYS_IN_MONTH[month]+(month ==2 and leapyear)\n n -=preceding\n assert 0 <=n <_days_in_month(year,month)\n \n \n \n return year,month,n+1\n \n \n_MONTHNAMES=[None ,\"Jan\",\"Feb\",\"Mar\",\"Apr\",\"May\",\"Jun\",\n\"Jul\",\"Aug\",\"Sep\",\"Oct\",\"Nov\",\"Dec\"]\n_DAYNAMES=[None ,\"Mon\",\"Tue\",\"Wed\",\"Thu\",\"Fri\",\"Sat\",\"Sun\"]\n\n\ndef _build_struct_time(y,m,d,hh,mm,ss,dstflag):\n wday=(_ymd2ord(y,m,d)+6)%7\n dnum=_days_before_month(y,m)+d\n return _time.struct_time((y,m,d,hh,mm,ss,wday,dnum,dstflag))\n \ndef _format_time(hh,mm,ss,us):\n\n result=\"%02d:%02d:%02d\"%(hh,mm,ss)\n if us:\n result +=\".%06d\"%us\n return result\n \n \ndef _wrap_strftime(object,format,timetuple):\n\n freplace=None\n zreplace=None\n Zreplace=None\n \n \n newformat=[]\n push=newformat.append\n i,n=0,len(format)\n while i 999999999:\n raise OverflowError(\"timedelta # of days is too large: %d\"%d)\n \n return self\n \n def __repr__(self):\n if self._microseconds:\n return\"%s(%d, %d, %d)\"%('datetime.'+self.__class__.__name__,\n self._days,\n self._seconds,\n self._microseconds)\n if self._seconds:\n return\"%s(%d, %d)\"%('datetime.'+self.__class__.__name__,\n self._days,\n self._seconds)\n return\"%s(%d)\"%('datetime.'+self.__class__.__name__,self._days)\n \n def __str__(self):\n mm,ss=divmod(self._seconds,60)\n hh,mm=divmod(mm,60)\n s=\"%d:%02d:%02d\"%(hh,mm,ss)\n if self._days:\n def plural(n):\n return n,abs(n)!=1 and\"s\"or\"\"\n s=(\"%d day%s, \"%plural(self._days))+s\n if self._microseconds:\n s=s+\".%06d\"%self._microseconds\n return s\n \n def total_seconds(self):\n ''\n return ((self.days *86400+self.seconds)*10 **6+\n self.microseconds)/10 **6\n \n \n @property\n def days(self):\n ''\n return self._days\n \n @property\n def seconds(self):\n ''\n return self._seconds\n \n @property\n def microseconds(self):\n ''\n return self._microseconds\n \n def __add__(self,other):\n if isinstance(other,timedelta):\n \n \n return timedelta(self._days+other._days,\n self._seconds+other._seconds,\n self._microseconds+other._microseconds)\n return NotImplemented\n \n __radd__=__add__\n \n def __sub__(self,other):\n if isinstance(other,timedelta):\n \n \n return timedelta(self._days -other._days,\n self._seconds -other._seconds,\n self._microseconds -other._microseconds)\n return NotImplemented\n \n def __rsub__(self,other):\n if isinstance(other,timedelta):\n return -self+other\n return NotImplemented\n \n def __neg__(self):\n \n \n return timedelta(-self._days,\n -self._seconds,\n -self._microseconds)\n \n def __pos__(self):\n return self\n \n def __abs__(self):\n if self._days <0:\n return -self\n else :\n return self\n \n def __mul__(self,other):\n if isinstance(other,int):\n \n \n return timedelta(self._days *other,\n self._seconds *other,\n self._microseconds *other)\n if isinstance(other,float):\n a,b=other.as_integer_ratio()\n return self *a /b\n return NotImplemented\n \n __rmul__=__mul__\n \n def _to_microseconds(self):\n return ((self._days *(24 *3600)+self._seconds)*1000000+\n self._microseconds)\n \n def __floordiv__(self,other):\n if not isinstance(other,(int,timedelta)):\n return NotImplemented\n usec=self._to_microseconds()\n if isinstance(other,timedelta):\n return 
usec //other._to_microseconds()\n if isinstance(other,int):\n return timedelta(0,0,usec //other)\n \n def __truediv__(self,other):\n if not isinstance(other,(int,float,timedelta)):\n return NotImplemented\n usec=self._to_microseconds()\n if isinstance(other,timedelta):\n return usec /other._to_microseconds()\n if isinstance(other,int):\n return timedelta(0,0,usec /other)\n if isinstance(other,float):\n a,b=other.as_integer_ratio()\n return timedelta(0,0,b *usec /a)\n \n def __mod__(self,other):\n if isinstance(other,timedelta):\n r=self._to_microseconds()%other._to_microseconds()\n return timedelta(0,0,r)\n return NotImplemented\n \n def __divmod__(self,other):\n if isinstance(other,timedelta):\n q,r=divmod(self._to_microseconds(),\n other._to_microseconds())\n return q,timedelta(0,0,r)\n return NotImplemented\n \n \n \n def __eq__(self,other):\n if isinstance(other,timedelta):\n return self._cmp(other)==0\n else :\n return False\n \n def __ne__(self,other):\n if isinstance(other,timedelta):\n return self._cmp(other)!=0\n else :\n return True\n \n def __le__(self,other):\n if isinstance(other,timedelta):\n return self._cmp(other)<=0\n else :\n _cmperror(self,other)\n \n def __lt__(self,other):\n if isinstance(other,timedelta):\n return self._cmp(other)<0\n else :\n _cmperror(self,other)\n \n def __ge__(self,other):\n if isinstance(other,timedelta):\n return self._cmp(other)>=0\n else :\n _cmperror(self,other)\n \n def __gt__(self,other):\n if isinstance(other,timedelta):\n return self._cmp(other)>0\n else :\n _cmperror(self,other)\n \n def _cmp(self,other):\n assert isinstance(other,timedelta)\n return _cmp(self._getstate(),other._getstate())\n \n def __hash__(self):\n return hash(self._getstate())\n \n def __bool__(self):\n return (self._days !=0 or\n self._seconds !=0 or\n self._microseconds !=0)\n \n \n \n def _getstate(self):\n return (self._days,self._seconds,self._microseconds)\n \n def __reduce__(self):\n return (self.__class__,self._getstate())\n \ntimedelta.min=timedelta(-999999999)\ntimedelta.max=timedelta(days=999999999,hours=23,minutes=59,seconds=59,\nmicroseconds=999999)\ntimedelta.resolution=timedelta(microseconds=1)\n\nclass date:\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n __slots__='_year','_month','_day'\n \n def __new__(cls,year,month=None ,day=None ):\n ''\n\n\n\n\n \n if (isinstance(year,bytes)and len(year)==4 and\n 1 <=year[2]<=12 and month is None ):\n \n self=object.__new__(cls)\n self.__setstate(year)\n return self\n _check_date_fields(year,month,day)\n self=object.__new__(cls)\n self._year=year\n self._month=month\n self._day=day\n return self\n \n \n \n @classmethod\n def fromtimestamp(cls,t):\n ''\n y,m,d,hh,mm,ss,weekday,jday,dst=_time.localtime(t)\n return cls(y,m,d)\n \n @classmethod\n def today(cls):\n ''\n t=_time.time()\n return cls.fromtimestamp(t)\n \n @classmethod\n def fromordinal(cls,n):\n ''\n\n\n\n \n y,m,d=_ord2ymd(n)\n return cls(y,m,d)\n \n \n \n def __repr__(self):\n ''\n\n\n\n\n\n\n\n\n \n return\"%s(%d, %d, %d)\"%('datetime.'+self.__class__.__name__,\n self._year,\n self._month,\n self._day)\n \n \n \n \n \n \n def ctime(self):\n ''\n weekday=self.toordinal()%7 or 7\n return\"%s %s %2d 00:00:00 %04d\"%(\n _DAYNAMES[weekday],\n _MONTHNAMES[self._month],\n self._day,self._year)\n \n def strftime(self,fmt):\n ''\n return _wrap_strftime(self,fmt,self.timetuple())\n \n def __format__(self,fmt):\n if len(fmt)!=0:\n return self.strftime(fmt)\n return str(self)\n \n def isoformat(self):\n ''\n\n\n\n\n\n\n \n 
return\"%04d-%02d-%02d\"%(self._year,self._month,self._day)\n \n __str__=isoformat\n \n \n @property\n def year(self):\n ''\n return self._year\n \n @property\n def month(self):\n ''\n return self._month\n \n @property\n def day(self):\n ''\n return self._day\n \n \n \n def timetuple(self):\n ''\n return _build_struct_time(self._year,self._month,self._day,\n 0,0,0,-1)\n \n def toordinal(self):\n ''\n\n\n\n \n return _ymd2ord(self._year,self._month,self._day)\n \n def replace(self,year=None ,month=None ,day=None ):\n ''\n if year is None :\n year=self._year\n if month is None :\n month=self._month\n if day is None :\n day=self._day\n _check_date_fields(year,month,day)\n return date(year,month,day)\n \n \n \n def __eq__(self,other):\n if isinstance(other,date):\n return self._cmp(other)==0\n return NotImplemented\n \n def __ne__(self,other):\n if isinstance(other,date):\n return self._cmp(other)!=0\n return NotImplemented\n \n def __le__(self,other):\n if isinstance(other,date):\n return self._cmp(other)<=0\n return NotImplemented\n \n def __lt__(self,other):\n if isinstance(other,date):\n return self._cmp(other)<0\n return NotImplemented\n \n def __ge__(self,other):\n if isinstance(other,date):\n return self._cmp(other)>=0\n return NotImplemented\n \n def __gt__(self,other):\n if isinstance(other,date):\n return self._cmp(other)>0\n return NotImplemented\n \n def _cmp(self,other):\n assert isinstance(other,date)\n y,m,d=self._year,self._month,self._day\n y2,m2,d2=other._year,other._month,other._day\n return _cmp((y,m,d),(y2,m2,d2))\n \n def __hash__(self):\n ''\n return hash(self._getstate())\n \n \n \n def __add__(self,other):\n ''\n if isinstance(other,timedelta):\n o=self.toordinal()+other.days\n if 0 =52:\n if today >=_isoweek1monday(year+1):\n year +=1\n week=0\n return year,week+1,day+1\n \n \n \n def _getstate(self):\n yhi,ylo=divmod(self._year,256)\n return bytes([yhi,ylo,self._month,self._day]),\n \n def __setstate(self,string):\n if len(string)!=4 or not (1 <=string[2]<=12):\n raise TypeError(\"not enough arguments\")\n yhi,ylo,self._month,self._day=string\n self._year=yhi *256+ylo\n \n def __reduce__(self):\n return (self.__class__,self._getstate())\n \n_date_class=date\n\ndate.min=date(1,1,1)\ndate.max=date(9999,12,31)\ndate.resolution=timedelta(days=1)\n\nclass tzinfo:\n ''\n\n\n \n __slots__=()\n def tzname(self,dt):\n ''\n raise NotImplementedError(\"tzinfo subclass must override tzname()\")\n \n def utcoffset(self,dt):\n ''\n raise NotImplementedError(\"tzinfo subclass must override utcoffset()\")\n \n def dst(self,dt):\n ''\n\n\n\n \n raise NotImplementedError(\"tzinfo subclass must override dst()\")\n \n def fromutc(self,dt):\n ''\n \n if not isinstance(dt,datetime):\n raise TypeError(\"fromutc() requires a datetime argument\")\n if dt.tzinfo is not self:\n raise ValueError(\"dt.tzinfo is not self\")\n \n dtoff=dt.utcoffset()\n if dtoff is None :\n raise ValueError(\"fromutc() requires a non-None utcoffset() \"\n \"result\")\n \n \n \n dtdst=dt.dst()\n if dtdst is None :\n raise ValueError(\"fromutc() requires a non-None dst() result\")\n delta=dtoff -dtdst\n if delta:\n dt +=delta\n dtdst=dt.dst()\n if dtdst is None :\n raise ValueError(\"fromutc(): dt.dst gave inconsistent \"\n \"results; cannot convert\")\n return dt+dtdst\n \n \n \n def __reduce__(self):\n getinitargs=getattr(self,\"__getinitargs__\",None )\n if getinitargs:\n args=getinitargs()\n else :\n args=()\n getstate=getattr(self,\"__getstate__\",None )\n if getstate:\n state=getstate()\n else :\n 
state=getattr(self,\"__dict__\",None )or None\n if state is None :\n return (self.__class__,args)\n else :\n return (self.__class__,args,state)\n \n_tzinfo_class=tzinfo\n\nclass time:\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n def __new__(cls,hour=0,minute=0,second=0,microsecond=0,tzinfo=None ):\n ''\n\n\n\n\n\n\n \n self=object.__new__(cls)\n if isinstance(hour,bytes)and len(hour)==6:\n \n self.__setstate(hour,minute or None )\n return self\n _check_tzinfo_arg(tzinfo)\n _check_time_fields(hour,minute,second,microsecond)\n self._hour=hour\n self._minute=minute\n self._second=second\n self._microsecond=microsecond\n self._tzinfo=tzinfo\n return self\n \n \n @property\n def hour(self):\n ''\n return self._hour\n \n @property\n def minute(self):\n ''\n return self._minute\n \n @property\n def second(self):\n ''\n return self._second\n \n @property\n def microsecond(self):\n ''\n return self._microsecond\n \n @property\n def tzinfo(self):\n ''\n return self._tzinfo\n \n \n \n \n \n def __eq__(self,other):\n if isinstance(other,time):\n return self._cmp(other,allow_mixed=True )==0\n else :\n return False\n \n def __ne__(self,other):\n if isinstance(other,time):\n return self._cmp(other,allow_mixed=True )!=0\n else :\n return True\n \n def __le__(self,other):\n if isinstance(other,time):\n return self._cmp(other)<=0\n else :\n _cmperror(self,other)\n \n def __lt__(self,other):\n if isinstance(other,time):\n return self._cmp(other)<0\n else :\n _cmperror(self,other)\n \n def __ge__(self,other):\n if isinstance(other,time):\n return self._cmp(other)>=0\n else :\n _cmperror(self,other)\n \n def __gt__(self,other):\n if isinstance(other,time):\n return self._cmp(other)>0\n else :\n _cmperror(self,other)\n \n def _cmp(self,other,allow_mixed=False ):\n assert isinstance(other,time)\n mytz=self._tzinfo\n ottz=other._tzinfo\n myoff=otoff=None\n \n if mytz is ottz:\n base_compare=True\n else :\n myoff=self.utcoffset()\n otoff=other.utcoffset()\n base_compare=myoff ==otoff\n \n if base_compare:\n return _cmp((self._hour,self._minute,self._second,\n self._microsecond),\n (other._hour,other._minute,other._second,\n other._microsecond))\n if myoff is None or otoff is None :\n if allow_mixed:\n return 2\n else :\n raise TypeError(\"cannot compare naive and aware times\")\n myhhmm=self._hour *60+self._minute -myoff //timedelta(minutes=1)\n othhmm=other._hour *60+other._minute -otoff //timedelta(minutes=1)\n return _cmp((myhhmm,self._second,self._microsecond),\n (othhmm,other._second,other._microsecond))\n \n def __hash__(self):\n ''\n tzoff=self.utcoffset()\n if not tzoff:\n return hash(self._getstate()[0])\n h,m=divmod(timedelta(hours=self.hour,minutes=self.minute)-tzoff,\n timedelta(hours=1))\n assert not m %timedelta(minutes=1),\"whole minute\"\n m //=timedelta(minutes=1)\n if 0 <=h <24:\n return hash(time(h,m,self.second,self.microsecond))\n return hash((h,m,self.second,self.microsecond))\n \n \n \n def _tzstr(self,sep=\":\"):\n ''\n off=self.utcoffset()\n if off is not None :\n if off.days <0:\n sign=\"-\"\n off=-off\n else :\n sign=\"+\"\n hh,mm=divmod(off,timedelta(hours=1))\n assert not mm %timedelta(minutes=1),\"whole minute\"\n mm //=timedelta(minutes=1)\n assert 0 <=hh <24\n off=\"%s%02d%s%02d\"%(sign,hh,sep,mm)\n return off\n \n def __repr__(self):\n ''\n if self._microsecond !=0:\n s=\", %d, %d\"%(self._second,self._microsecond)\n elif self._second !=0:\n s=\", %d\"%self._second\n else :\n s=\"\"\n s=\"%s(%d, %d%s)\"%('datetime.'+self.__class__.__name__,\n self._hour,self._minute,s)\n if 
self._tzinfo is not None :\n assert s[-1:]==\")\"\n s=s[:-1]+\", tzinfo=%r\"%self._tzinfo+\")\"\n return s\n \n def isoformat(self):\n ''\n\n\n\n \n s=_format_time(self._hour,self._minute,self._second,\n self._microsecond)\n tz=self._tzstr()\n if tz:\n s +=tz\n return s\n \n __str__=isoformat\n \n def strftime(self,fmt):\n ''\n\n \n \n \n timetuple=(1900,1,1,\n self._hour,self._minute,self._second,\n 0,1,-1)\n return _wrap_strftime(self,fmt,timetuple)\n \n def __format__(self,fmt):\n if len(fmt)!=0:\n return self.strftime(fmt)\n return str(self)\n \n \n \n def utcoffset(self):\n ''\n \n if self._tzinfo is None :\n return None\n offset=self._tzinfo.utcoffset(None )\n _check_utc_offset(\"utcoffset\",offset)\n return offset\n \n def tzname(self):\n ''\n\n\n\n\n \n if self._tzinfo is None :\n return None\n name=self._tzinfo.tzname(None )\n _check_tzname(name)\n return name\n \n def dst(self):\n ''\n\n\n\n\n\n\n \n if self._tzinfo is None :\n return None\n offset=self._tzinfo.dst(None )\n _check_utc_offset(\"dst\",offset)\n return offset\n \n def replace(self,hour=None ,minute=None ,second=None ,microsecond=None ,\n tzinfo=True ):\n ''\n if hour is None :\n hour=self.hour\n if minute is None :\n minute=self.minute\n if second is None :\n second=self.second\n if microsecond is None :\n microsecond=self.microsecond\n if tzinfo is True :\n tzinfo=self.tzinfo\n _check_time_fields(hour,minute,second,microsecond)\n _check_tzinfo_arg(tzinfo)\n return time(hour,minute,second,microsecond,tzinfo)\n \n def __bool__(self):\n if self.second or self.microsecond:\n return True\n offset=self.utcoffset()or timedelta(0)\n return timedelta(hours=self.hour,minutes=self.minute)!=offset\n \n \n \n def _getstate(self):\n us2,us3=divmod(self._microsecond,256)\n us1,us2=divmod(us2,256)\n basestate=bytes([self._hour,self._minute,self._second,\n us1,us2,us3])\n if self._tzinfo is None :\n return (basestate,)\n else :\n return (basestate,self._tzinfo)\n \n def __setstate(self,string,tzinfo):\n if len(string)!=6 or string[0]>=24:\n raise TypeError(\"an integer is required\")\n (self._hour,self._minute,self._second,\n us1,us2,us3)=string\n self._microsecond=(((us1 <<8)|us2)<<8)|us3\n if tzinfo is None or isinstance(tzinfo,_tzinfo_class):\n self._tzinfo=tzinfo\n else :\n raise TypeError(\"bad tzinfo state arg %r\"%tzinfo)\n \n def __reduce__(self):\n return (time,self._getstate())\n \n_time_class=time\n\ntime.min=time(0,0,0)\ntime.max=time(23,59,59,999999)\ntime.resolution=timedelta(microseconds=1)\n\nclass datetime(date):\n ''\n\n\n\n \n \n __slots__=date.__slots__+(\n '_hour','_minute','_second',\n '_microsecond','_tzinfo')\n def __new__(cls,year,month=None ,day=None ,hour=0,minute=0,second=0,\n microsecond=0,tzinfo=None ):\n if isinstance(year,bytes)and len(year)==10:\n \n self=date.__new__(cls,year[:4])\n self.__setstate(year,month)\n return self\n _check_tzinfo_arg(tzinfo)\n _check_time_fields(hour,minute,second,microsecond)\n self=date.__new__(cls,year,month,day)\n self._hour=hour\n self._minute=minute\n self._second=second\n self._microsecond=microsecond\n self._tzinfo=tzinfo\n return self\n \n \n @property\n def hour(self):\n ''\n return self._hour\n \n @property\n def minute(self):\n ''\n return self._minute\n \n @property\n def second(self):\n ''\n return self._second\n \n @property\n def microsecond(self):\n ''\n return self._microsecond\n \n @property\n def tzinfo(self):\n ''\n return self._tzinfo\n \n @classmethod\n def fromtimestamp(cls,t,tz=None ):\n ''\n\n\n \n \n _check_tzinfo_arg(tz)\n \n 
converter=_time.localtime if tz is None else _time.gmtime\n \n t,frac=divmod(t,1.0)\n us=int(frac *1e6)\n \n \n \n \n \n if us ==1000000:\n t +=1\n us=0\n y,m,d,hh,mm,ss,weekday,jday,dst=converter(t)\n ss=min(ss,59)\n result=cls(y,m,d,hh,mm,ss,us,tz)\n if tz is not None :\n result=tz.fromutc(result)\n return result\n \n @classmethod\n def utcfromtimestamp(cls,t):\n ''\n t,frac=divmod(t,1.0)\n us=int(frac *1e6)\n \n \n \n \n \n if us ==1000000:\n t +=1\n us=0\n y,m,d,hh,mm,ss,weekday,jday,dst=_time.gmtime(t)\n ss=min(ss,59)\n return cls(y,m,d,hh,mm,ss,us)\n \n \n \n \n \n \n @classmethod\n def now(cls,tz=None ):\n ''\n t=_time.time()\n return cls.fromtimestamp(t,tz)\n \n @classmethod\n def utcnow(cls):\n ''\n t=_time.time()\n return cls.utcfromtimestamp(t)\n \n @classmethod\n def combine(cls,date,time):\n ''\n if not isinstance(date,_date_class):\n raise TypeError(\"date argument must be a date instance\")\n if not isinstance(time,_time_class):\n raise TypeError(\"time argument must be a time instance\")\n return cls(date.year,date.month,date.day,\n time.hour,time.minute,time.second,time.microsecond,\n time.tzinfo)\n \n def timetuple(self):\n ''\n dst=self.dst()\n if dst is None :\n dst=-1\n elif dst:\n dst=1\n else :\n dst=0\n return _build_struct_time(self.year,self.month,self.day,\n self.hour,self.minute,self.second,\n dst)\n \n def timestamp(self):\n ''\n if self._tzinfo is None :\n return _time.mktime((self.year,self.month,self.day,\n self.hour,self.minute,self.second,\n -1,-1,-1))+self.microsecond /1e6\n else :\n return (self -_EPOCH).total_seconds()\n \n def utctimetuple(self):\n ''\n offset=self.utcoffset()\n if offset:\n self -=offset\n y,m,d=self.year,self.month,self.day\n hh,mm,ss=self.hour,self.minute,self.second\n return _build_struct_time(y,m,d,hh,mm,ss,0)\n \n def date(self):\n ''\n return date(self._year,self._month,self._day)\n \n def time(self):\n ''\n return time(self.hour,self.minute,self.second,self.microsecond)\n \n def timetz(self):\n ''\n return time(self.hour,self.minute,self.second,self.microsecond,\n self._tzinfo)\n \n def replace(self,year=None ,month=None ,day=None ,hour=None ,\n minute=None ,second=None ,microsecond=None ,tzinfo=True ):\n ''\n if year is None :\n year=self.year\n if month is None :\n month=self.month\n if day is None :\n day=self.day\n if hour is None :\n hour=self.hour\n if minute is None :\n minute=self.minute\n if second is None :\n second=self.second\n if microsecond is None :\n microsecond=self.microsecond\n if tzinfo is True :\n tzinfo=self.tzinfo\n _check_date_fields(year,month,day)\n _check_time_fields(hour,minute,second,microsecond)\n _check_tzinfo_arg(tzinfo)\n return datetime(year,month,day,hour,minute,second,\n microsecond,tzinfo)\n \n def astimezone(self,tz=None ):\n if tz is None :\n if self.tzinfo is None :\n raise ValueError(\"astimezone() requires an aware datetime\")\n ts=(self -_EPOCH)//timedelta(seconds=1)\n localtm=_time.localtime(ts)\n local=datetime(*localtm[:6])\n try :\n \n gmtoff=localtm.tm_gmtoff\n zone=localtm.tm_zone\n except AttributeError:\n \n \n \n delta=local -datetime(*_time.gmtime(ts)[:6])\n dst=_time.daylight and localtm.tm_isdst >0\n gmtoff=-(_time.altzone if dst else _time.timezone)\n if delta ==timedelta(seconds=gmtoff):\n tz=timezone(delta,_time.tzname[dst])\n else :\n tz=timezone(delta)\n else :\n tz=timezone(timedelta(seconds=gmtoff),zone)\n \n elif not isinstance(tz,tzinfo):\n raise TypeError(\"tz argument must be an instance of tzinfo\")\n \n mytz=self.tzinfo\n if mytz is None :\n raise 
ValueError(\"astimezone() requires an aware datetime\")\n \n if tz is mytz:\n return self\n \n \n myoffset=self.utcoffset()\n if myoffset is None :\n raise ValueError(\"astimezone() requires an aware datetime\")\n utc=(self -myoffset).replace(tzinfo=tz)\n \n \n return tz.fromutc(utc)\n \n \n \n def ctime(self):\n ''\n weekday=self.toordinal()%7 or 7\n return\"%s %s %2d %02d:%02d:%02d %04d\"%(\n _DAYNAMES[weekday],\n _MONTHNAMES[self._month],\n self._day,\n self._hour,self._minute,self._second,\n self._year)\n \n def isoformat(self,sep='T'):\n ''\n\n\n\n\n\n\n\n\n\n \n s=(\"%04d-%02d-%02d%c\"%(self._year,self._month,self._day,\n sep)+\n _format_time(self._hour,self._minute,self._second,\n self._microsecond))\n off=self.utcoffset()\n if off is not None :\n if off.days <0:\n sign=\"-\"\n off=-off\n else :\n sign=\"+\"\n hh,mm=divmod(off,timedelta(hours=1))\n assert not mm %timedelta(minutes=1),\"whole minute\"\n mm //=timedelta(minutes=1)\n s +=\"%s%02d:%02d\"%(sign,hh,mm)\n return s\n \n def __repr__(self):\n ''\n L=[self._year,self._month,self._day,\n self._hour,self._minute,self._second,self._microsecond]\n if L[-1]==0:\n del L[-1]\n if L[-1]==0:\n del L[-1]\n s=\", \".join(map(str,L))\n s=\"%s(%s)\"%('datetime.'+self.__class__.__name__,s)\n if self._tzinfo is not None :\n assert s[-1:]==\")\"\n s=s[:-1]+\", tzinfo=%r\"%self._tzinfo+\")\"\n return s\n \n def __str__(self):\n ''\n return self.isoformat(sep=' ')\n \n @classmethod\n def strptime(cls,date_string,format):\n ''\n import _strptime\n return _strptime._strptime_datetime(cls,date_string,format)\n \n def utcoffset(self):\n ''\n \n if self._tzinfo is None :\n return None\n offset=self._tzinfo.utcoffset(self)\n _check_utc_offset(\"utcoffset\",offset)\n return offset\n \n def tzname(self):\n ''\n\n\n\n\n \n name=_call_tzinfo_method(self._tzinfo,\"tzname\",self)\n _check_tzname(name)\n return name\n \n def dst(self):\n ''\n\n\n\n\n\n\n \n if self._tzinfo is None :\n return None\n offset=self._tzinfo.dst(self)\n _check_utc_offset(\"dst\",offset)\n return offset\n \n \n \n def __eq__(self,other):\n if isinstance(other,datetime):\n return self._cmp(other,allow_mixed=True )==0\n elif not isinstance(other,date):\n return NotImplemented\n else :\n return False\n \n def __ne__(self,other):\n if isinstance(other,datetime):\n return self._cmp(other,allow_mixed=True )!=0\n elif not isinstance(other,date):\n return NotImplemented\n else :\n return True\n \n def __le__(self,other):\n if isinstance(other,datetime):\n return self._cmp(other)<=0\n elif not isinstance(other,date):\n return NotImplemented\n else :\n _cmperror(self,other)\n \n def __lt__(self,other):\n if isinstance(other,datetime):\n return self._cmp(other)<0\n elif not isinstance(other,date):\n return NotImplemented\n else :\n _cmperror(self,other)\n \n def __ge__(self,other):\n if isinstance(other,datetime):\n return self._cmp(other)>=0\n elif not isinstance(other,date):\n return NotImplemented\n else :\n _cmperror(self,other)\n \n def __gt__(self,other):\n if isinstance(other,datetime):\n return self._cmp(other)>0\n elif not isinstance(other,date):\n return NotImplemented\n else :\n _cmperror(self,other)\n \n def _cmp(self,other,allow_mixed=False ):\n assert isinstance(other,datetime)\n mytz=self._tzinfo\n ottz=other._tzinfo\n myoff=otoff=None\n \n if mytz is ottz:\n base_compare=True\n else :\n myoff=self.utcoffset()\n otoff=other.utcoffset()\n base_compare=myoff ==otoff\n \n if base_compare:\n return _cmp((self._year,self._month,self._day,\n self._hour,self._minute,self._second,\n 
self._microsecond),\n (other._year,other._month,other._day,\n other._hour,other._minute,other._second,\n other._microsecond))\n if myoff is None or otoff is None :\n if allow_mixed:\n return 2\n else :\n raise TypeError(\"cannot compare naive and aware datetimes\")\n \n diff=self -other\n if diff.days <0:\n return -1\n return diff and 1 or 0\n \n def __add__(self,other):\n ''\n if not isinstance(other,timedelta):\n return NotImplemented\n delta=timedelta(self.toordinal(),\n hours=self._hour,\n minutes=self._minute,\n seconds=self._second,\n microseconds=self._microsecond)\n delta +=other\n hour,rem=divmod(delta.seconds,3600)\n minute,second=divmod(rem,60)\n if 0 THURSDAY:\n week1monday +=7\n return week1monday\n \nclass timezone(tzinfo):\n __slots__='_offset','_name'\n \n \n _Omitted=object()\n def __new__(cls,offset,name=_Omitted):\n if not isinstance(offset,timedelta):\n raise TypeError(\"offset must be a timedelta\")\n if name is cls._Omitted:\n if not offset:\n return cls.utc\n name=None\n elif not isinstance(name,str):\n raise TypeError(\"name must be a string\")\n if not cls._minoffset <=offset <=cls._maxoffset:\n raise ValueError(\"offset must be a timedelta\"\n \" strictly between -timedelta(hours=24) and\"\n \" timedelta(hours=24).\")\n if (offset.microseconds !=0 or\n offset.seconds %60 !=0):\n raise ValueError(\"offset must be a timedelta\"\n \" representing a whole number of minutes\")\n return cls._create(offset,name)\n \n @classmethod\n def _create(cls,offset,name=None ):\n self=tzinfo.__new__(cls)\n self._offset=offset\n self._name=name\n return self\n \n def __getinitargs__(self):\n ''\n if self._name is None :\n return (self._offset,)\n return (self._offset,self._name)\n \n def __eq__(self,other):\n if type(other)!=timezone:\n return False\n return self._offset ==other._offset\n \n def __hash__(self):\n return hash(self._offset)\n \n def __repr__(self):\n ''\n\n\n\n\n\n\n\n \n if self is self.utc:\n return'datetime.timezone.utc'\n if self._name is None :\n return\"%s(%r)\"%('datetime.'+self.__class__.__name__,\n self._offset)\n return\"%s(%r, %r)\"%('datetime.'+self.__class__.__name__,\n self._offset,self._name)\n \n def __str__(self):\n return self.tzname(None )\n \n def utcoffset(self,dt):\n if isinstance(dt,datetime)or dt is None :\n return self._offset\n raise TypeError(\"utcoffset() argument must be a datetime instance\"\n \" or None\")\n \n def tzname(self,dt):\n if isinstance(dt,datetime)or dt is None :\n if self._name is None :\n return self._name_from_offset(self._offset)\n return self._name\n raise TypeError(\"tzname() argument must be a datetime instance\"\n \" or None\")\n \n def dst(self,dt):\n if isinstance(dt,datetime)or dt is None :\n return None\n raise TypeError(\"dst() argument must be a datetime instance\"\n \" or None\")\n \n def fromutc(self,dt):\n if isinstance(dt,datetime):\n if dt.tzinfo is not self:\n raise ValueError(\"fromutc: dt.tzinfo \"\n \"is not self\")\n return dt+self._offset\n raise TypeError(\"fromutc() argument must be a datetime instance\"\n \" or None\")\n \n _maxoffset=timedelta(hours=23,minutes=59)\n _minoffset=-_maxoffset\n \n @staticmethod\n def _name_from_offset(delta):\n if delta other.int\n return NotImplemented\n \n def __le__(self,other):\n if isinstance(other,UUID):\n return self.int <=other.int\n return NotImplemented\n \n def __ge__(self,other):\n if isinstance(other,UUID):\n return self.int >=other.int\n return NotImplemented\n \n def __hash__(self):\n return hash(self.int)\n \n def __int__(self):\n return self.int\n \n 
def __repr__(self):\n return'UUID(%r)'%str(self)\n \n def __setattr__(self,name,value):\n raise TypeError('UUID objects are immutable')\n \n def __str__(self):\n hex='%032x'%self.int\n return'%s-%s-%s-%s-%s'%(\n hex[:8],hex[8:12],hex[12:16],hex[16:20],hex[20:])\n \n @property\n def bytes(self):\n bytes=bytearray()\n for shift in range(0,128,8):\n bytes.insert(0,(self.int >>shift)&0xff)\n return bytes_(bytes)\n \n @property\n def bytes_le(self):\n bytes=self.bytes\n return (bytes_(reversed(bytes[0:4]))+\n bytes_(reversed(bytes[4:6]))+\n bytes_(reversed(bytes[6:8]))+\n bytes[8:])\n \n @property\n def fields(self):\n return (self.time_low,self.time_mid,self.time_hi_version,\n self.clock_seq_hi_variant,self.clock_seq_low,self.node)\n \n @property\n def time_low(self):\n return self.int >>96\n \n @property\n def time_mid(self):\n return (self.int >>80)&0xffff\n \n @property\n def time_hi_version(self):\n return (self.int >>64)&0xffff\n \n @property\n def clock_seq_hi_variant(self):\n return (self.int >>56)&0xff\n \n @property\n def clock_seq_low(self):\n return (self.int >>48)&0xff\n \n @property\n def time(self):\n return (((self.time_hi_version&0x0fff)<<48)|\n (self.time_mid <<32)|self.time_low)\n \n @property\n def clock_seq(self):\n return (((self.clock_seq_hi_variant&0x3f)<<8)|\n self.clock_seq_low)\n \n @property\n def node(self):\n return self.int&0xffffffffffff\n \n @property\n def hex(self):\n return'%032x'%self.int\n \n @property\n def urn(self):\n return'urn:uuid:'+str(self)\n \n @property\n def variant(self):\n if not self.int&(0x8000 <<48):\n return RESERVED_NCS\n elif not self.int&(0x4000 <<48):\n return RFC_4122\n elif not self.int&(0x2000 <<48):\n return RESERVED_MICROSOFT\n else :\n return RESERVED_FUTURE\n \n @property\n def version(self):\n \n if self.variant ==RFC_4122:\n return int((self.int >>76)&0xf)\n \ndef _find_mac(command,args,hw_identifiers,get_index):\n import os,shutil\n executable=shutil.which(command)\n if executable is None :\n path=os.pathsep.join(('/sbin','/usr/sbin'))\n executable=shutil.which(command,path=path)\n if executable is None :\n return None\n \n try :\n \n \n \n cmd='LC_ALL=C %s %s 2>/dev/null'%(executable,args)\n with os.popen(cmd)as pipe:\n for line in pipe:\n words=line.lower().split()\n for i in range(len(words)):\n if words[i]in hw_identifiers:\n try :\n return int(\n words[get_index(i)].replace(':',''),16)\n except (ValueError,IndexError):\n \n \n \n \n \n pass\n except OSError:\n pass\n \ndef _ifconfig_getnode():\n ''\n \n \n for args in ('','-a','-av'):\n mac=_find_mac('ifconfig',args,['hwaddr','ether'],lambda i:i+1)\n if mac:\n return mac\n \n import socket\n ip_addr=socket.gethostbyname(socket.gethostname())\n \n \n mac=_find_mac('arp','-an',[ip_addr],lambda i:-1)\n if mac:\n return mac\n \n \n mac=_find_mac('lanscan','-ai',['lan0'],lambda i:0)\n if mac:\n return mac\n \n return None\n \ndef _ipconfig_getnode():\n ''\n import os,re\n dirs=['',r'c:\\windows\\system32',r'c:\\winnt\\system32']\n try :\n import ctypes\n buffer=ctypes.create_string_buffer(300)\n ctypes.windll.kernel32.GetSystemDirectoryA(buffer,300)\n dirs.insert(0,buffer.value.decode('mbcs'))\n except :\n pass\n \n \n \n \n \n \n \n \n \n \n \n \n \ndef _netbios_getnode():\n ''\n \n import win32wnet,netbios\n ncb=netbios.NCB()\n ncb.Command=netbios.NCBENUM\n ncb.Buffer=adapters=netbios.LANA_ENUM()\n adapters._pack()\n if win32wnet.Netbios(ncb)!=0:\n return\n adapters._unpack()\n for i in range(adapters.length):\n ncb.Reset()\n ncb.Command=netbios.NCBRESET\n 
ncb.Lana_num=ord(adapters.lana[i])\n if win32wnet.Netbios(ncb)!=0:\n continue\n ncb.Reset()\n ncb.Command=netbios.NCBASTAT\n ncb.Lana_num=ord(adapters.lana[i])\n ncb.Callname='*'.ljust(16)\n ncb.Buffer=status=netbios.ADAPTER_STATUS()\n if win32wnet.Netbios(ncb)!=0:\n continue\n status._unpack()\n bytes=status.adapter_address\n return ((bytes[0]<<40)+(bytes[1]<<32)+(bytes[2]<<24)+\n (bytes[3]<<16)+(bytes[4]<<8)+bytes[5])\n \n \n \n \n \n_uuid_generate_random=_uuid_generate_time=_UuidCreate=None\ntry :\n import ctypes,ctypes.util\n \n \n \n for libname in ['uuid','c']:\n try :\n lib=ctypes.CDLL(ctypes.util.find_library(libname))\n except :\n continue\n if hasattr(lib,'uuid_generate_random'):\n _uuid_generate_random=lib.uuid_generate_random\n if hasattr(lib,'uuid_generate_time'):\n _uuid_generate_time=lib.uuid_generate_time\n if _uuid_generate_random is not None :\n break\n \n \n \n \n \n \n \n \n import sys\n if sys.platform =='darwin':\n import os\n if int(os.uname().release.split('.')[0])>=9:\n _uuid_generate_random=_uuid_generate_time=None\n \n \n \n \n \n \n \n \n \n try :\n lib=ctypes.windll.rpcrt4\n except :\n lib=None\n _UuidCreate=getattr(lib,'UuidCreateSequential',\n getattr(lib,'UuidCreate',None ))\nexcept :\n pass\n \ndef _unixdll_getnode():\n ''\n _buffer=ctypes.create_string_buffer(16)\n _uuid_generate_time(_buffer)\n return UUID(bytes=bytes_(_buffer.raw)).node\n \ndef _windll_getnode():\n ''\n _buffer=ctypes.create_string_buffer(16)\n if _UuidCreate(_buffer)==0:\n return UUID(bytes=bytes_(_buffer.raw)).node\n \ndef _random_getnode():\n ''\n import random\n return random.randrange(0,1 <<48)|0x010000000000\n \n_node=None\n\ndef getnode():\n ''\n\n\n\n\n\n \n \n global _node\n if _node is not None :\n return _node\n \n import sys\n if sys.platform =='win32':\n getters=[_windll_getnode,_netbios_getnode,_ipconfig_getnode]\n else :\n getters=[_unixdll_getnode,_ifconfig_getnode]\n \n for getter in getters+[_random_getnode]:\n try :\n _node=getter()\n except :\n continue\n if _node is not None :\n return _node\n \n_last_timestamp=None\n\ndef uuid1(node=None ,clock_seq=None ):\n ''\n\n\n \n \n \n \n if _uuid_generate_time and node is clock_seq is None :\n _buffer=ctypes.create_string_buffer(16)\n _uuid_generate_time(_buffer)\n return UUID(bytes=bytes_(_buffer.raw))\n \n global _last_timestamp\n import time\n nanoseconds=int(time.time()*1e9)\n \n \n timestamp=int(nanoseconds /100)+0x01b21dd213814000\n if _last_timestamp is not None and timestamp <=_last_timestamp:\n timestamp=_last_timestamp+1\n _last_timestamp=timestamp\n if clock_seq is None :\n import random\n clock_seq=random.randrange(1 <<14)\n time_low=timestamp&0xffffffff\n time_mid=(timestamp >>32)&0xffff\n time_hi_version=(timestamp >>48)&0x0fff\n clock_seq_low=clock_seq&0xff\n clock_seq_hi_variant=(clock_seq >>8)&0x3f\n if node is None :\n node=getnode()\n return UUID(fields=(time_low,time_mid,time_hi_version,\n clock_seq_hi_variant,clock_seq_low,node),version=1)\n \ndef uuid3(namespace,name):\n ''\n from hashlib import md5\n hash=md5(namespace.bytes+bytes(name,\"utf-8\")).digest()\n return UUID(bytes=hash[:16],version=3)\n \ndef uuid4():\n ''\n \n \n if _uuid_generate_random:\n _buffer=ctypes.create_string_buffer(16)\n _uuid_generate_random(_buffer)\n return UUID(bytes=bytes_(_buffer.raw))\n \n \n try :\n import os\n return UUID(bytes=os.urandom(16),version=4)\n except :\n import random\n bytes=bytes_(random.randrange(256)for i in range(16))\n return UUID(bytes=bytes,version=4)\n \ndef uuid5(namespace,name):\n ''\n from 
hashlib import sha1\n hash=sha1(namespace.bytes+bytes(name,\"utf-8\")).digest()\n return UUID(bytes=hash[:16],version=5)\n \n \n \nNAMESPACE_DNS=UUID('6ba7b810-9dad-11d1-80b4-00c04fd430c8')\nNAMESPACE_URL=UUID('6ba7b811-9dad-11d1-80b4-00c04fd430c8')\nNAMESPACE_OID=UUID('6ba7b812-9dad-11d1-80b4-00c04fd430c8')\nNAMESPACE_X500=UUID('6ba7b814-9dad-11d1-80b4-00c04fd430c8')\n"], "asyncio.test_utils": [".py", "''\n\nimport collections\nimport contextlib\nimport io\nimport logging\nimport os\nimport re\nimport socket\nimport socketserver\nimport sys\nimport tempfile\nimport threading\nimport time\nimport unittest\nfrom unittest import mock\n\nfrom http.server import HTTPServer\nfrom wsgiref.simple_server import WSGIRequestHandler,WSGIServer\n\ntry :\n import ssl\nexcept ImportError:\n ssl=None\n \nfrom .import base_events\nfrom .import events\nfrom .import futures\nfrom .import selectors\nfrom .import tasks\nfrom .coroutines import coroutine\nfrom .log import logger\n\n\nif sys.platform =='win32':\n from .windows_utils import socketpair\nelse :\n from socket import socketpair\n \n \ndef dummy_ssl_context():\n if ssl is None :\n return None\n else :\n return ssl.SSLContext(ssl.PROTOCOL_SSLv23)\n \n \ndef run_briefly(loop):\n @coroutine\n def once():\n pass\n gen=once()\n t=loop.create_task(gen)\n \n \n t._log_destroy_pending=False\n try :\n loop.run_until_complete(t)\n finally :\n gen.close()\n \n \ndef run_until(loop,pred,timeout=30):\n deadline=time.time()+timeout\n while not pred():\n if timeout is not None :\n timeout=deadline -time.time()\n if timeout <=0:\n raise futures.TimeoutError()\n loop.run_until_complete(tasks.sleep(0.001,loop=loop))\n \n \ndef run_once(loop):\n ''\n\n\n\n \n loop.stop()\n loop.run_forever()\n \n \nclass SilentWSGIRequestHandler(WSGIRequestHandler):\n\n def get_stderr(self):\n return io.StringIO()\n \n def log_message(self,format,*args):\n pass\n \n \nclass SilentWSGIServer(WSGIServer):\n\n request_timeout=2\n \n def get_request(self):\n request,client_addr=super().get_request()\n request.settimeout(self.request_timeout)\n return request,client_addr\n \n def handle_error(self,request,client_address):\n pass\n \n \nclass SSLWSGIServerMixin:\n\n def finish_request(self,request,client_address):\n \n \n \n \n here=os.path.join(os.path.dirname(__file__),'..','tests')\n if not os.path.isdir(here):\n here=os.path.join(os.path.dirname(os.__file__),\n 'test','test_asyncio')\n keyfile=os.path.join(here,'ssl_key.pem')\n certfile=os.path.join(here,'ssl_cert.pem')\n ssock=ssl.wrap_socket(request,\n keyfile=keyfile,\n certfile=certfile,\n server_side=True )\n try :\n self.RequestHandlerClass(ssock,client_address,self)\n ssock.close()\n except OSError:\n \n pass\n \n \nclass SSLWSGIServer(SSLWSGIServerMixin,SilentWSGIServer):\n pass\n \n \ndef _run_test_server(*,address,use_ssl=False ,server_cls,server_ssl_cls):\n\n def app(environ,start_response):\n status='200 OK'\n headers=[('Content-type','text/plain')]\n start_response(status,headers)\n return [b'Test message']\n \n \n \n server_class=server_ssl_cls if use_ssl else server_cls\n httpd=server_class(address,SilentWSGIRequestHandler)\n httpd.set_app(app)\n httpd.address=httpd.server_address\n server_thread=threading.Thread(\n target=lambda :httpd.serve_forever(poll_interval=0.05))\n server_thread.start()\n try :\n yield httpd\n finally :\n httpd.shutdown()\n httpd.server_close()\n server_thread.join()\n \n \nif hasattr(socket,'AF_UNIX'):\n\n class UnixHTTPServer(socketserver.UnixStreamServer,HTTPServer):\n \n def 
server_bind(self):\n socketserver.UnixStreamServer.server_bind(self)\n self.server_name='127.0.0.1'\n self.server_port=80\n \n \n class UnixWSGIServer(UnixHTTPServer,WSGIServer):\n \n request_timeout=2\n \n def server_bind(self):\n UnixHTTPServer.server_bind(self)\n self.setup_environ()\n \n def get_request(self):\n request,client_addr=super().get_request()\n request.settimeout(self.request_timeout)\n \n \n \n \n \n \n return request,('127.0.0.1','')\n \n \n class SilentUnixWSGIServer(UnixWSGIServer):\n \n def handle_error(self,request,client_address):\n pass\n \n \n class UnixSSLWSGIServer(SSLWSGIServerMixin,SilentUnixWSGIServer):\n pass\n \n \n def gen_unix_socket_path():\n with tempfile.NamedTemporaryFile()as file:\n return file.name\n \n \n @contextlib.contextmanager\n def unix_socket_path():\n path=gen_unix_socket_path()\n try :\n yield path\n finally :\n try :\n os.unlink(path)\n except OSError:\n pass\n \n \n @contextlib.contextmanager\n def run_test_unix_server(*,use_ssl=False ):\n with unix_socket_path()as path:\n yield from _run_test_server(address=path,use_ssl=use_ssl,\n server_cls=SilentUnixWSGIServer,\n server_ssl_cls=UnixSSLWSGIServer)\n \n \n@contextlib.contextmanager\ndef run_test_server(*,host='127.0.0.1',port=0,use_ssl=False ):\n yield from _run_test_server(address=(host,port),use_ssl=use_ssl,\n server_cls=SilentWSGIServer,\n server_ssl_cls=SSLWSGIServer)\n \n \ndef make_test_protocol(base):\n dct={}\n for name in dir(base):\n if name.startswith('__')and name.endswith('__'):\n \n continue\n dct[name]=MockCallback(return_value=None )\n return type('TestProtocol',(base,)+base.__bases__,dct)()\n \n \nclass TestSelector(selectors.BaseSelector):\n\n def __init__(self):\n self.keys={}\n \n def register(self,fileobj,events,data=None ):\n key=selectors.SelectorKey(fileobj,0,events,data)\n self.keys[fileobj]=key\n return key\n \n def unregister(self,fileobj):\n return self.keys.pop(fileobj)\n \n def select(self,timeout):\n return []\n \n def get_map(self):\n return self.keys\n \n \nclass TestLoop(base_events.BaseEventLoop):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n def __init__(self,gen=None ):\n super().__init__()\n \n if gen is None :\n def gen():\n yield\n self._check_on_close=False\n else :\n self._check_on_close=True\n \n self._gen=gen()\n next(self._gen)\n self._time=0\n self._clock_resolution=1e-9\n self._timers=[]\n self._selector=TestSelector()\n \n self.readers={}\n self.writers={}\n self.reset_counters()\n \n def time(self):\n return self._time\n \n def advance_time(self,advance):\n ''\n if advance:\n self._time +=advance\n \n def close(self):\n super().close()\n if self._check_on_close:\n try :\n self._gen.send(0)\n except StopIteration:\n pass\n else :\n raise AssertionError(\"Time generator is not finished\")\n \n def add_reader(self,fd,callback,*args):\n self.readers[fd]=events.Handle(callback,args,self)\n \n def remove_reader(self,fd):\n self.remove_reader_count[fd]+=1\n if fd in self.readers:\n del self.readers[fd]\n return True\n else :\n return False\n \n def assert_reader(self,fd,callback,*args):\n assert fd in self.readers,'fd {} is not registered'.format(fd)\n handle=self.readers[fd]\n assert handle._callback ==callback,'{!r} != {!r}'.format(\n handle._callback,callback)\n assert handle._args ==args,'{!r} != {!r}'.format(\n handle._args,args)\n \n def add_writer(self,fd,callback,*args):\n self.writers[fd]=events.Handle(callback,args,self)\n \n def remove_writer(self,fd):\n self.remove_writer_count[fd]+=1\n if fd in self.writers:\n del self.writers[fd]\n 
return True\n else :\n return False\n \n def assert_writer(self,fd,callback,*args):\n assert fd in self.writers,'fd {} is not registered'.format(fd)\n handle=self.writers[fd]\n assert handle._callback ==callback,'{!r} != {!r}'.format(\n handle._callback,callback)\n assert handle._args ==args,'{!r} != {!r}'.format(\n handle._args,args)\n \n def reset_counters(self):\n self.remove_reader_count=collections.defaultdict(int)\n self.remove_writer_count=collections.defaultdict(int)\n \n def _run_once(self):\n super()._run_once()\n for when in self._timers:\n advance=self._gen.send(when)\n self.advance_time(advance)\n self._timers=[]\n \n def call_at(self,when,callback,*args):\n self._timers.append(when)\n return super().call_at(when,callback,*args)\n \n def _process_events(self,event_list):\n return\n \n def _write_to_self(self):\n pass\n \n \ndef MockCallback(**kwargs):\n return mock.Mock(spec=['__call__'],**kwargs)\n \n \nclass MockPattern(str):\n ''\n\n\n\n\n\n\n \n def __eq__(self,other):\n return bool(re.search(str(self),other,re.S))\n \n \ndef get_function_source(func):\n source=events._get_function_source(func)\n if source is None :\n raise ValueError(\"unable to get the source of %r\"%(func,))\n return source\n \n \nclass TestCase(unittest.TestCase):\n def set_event_loop(self,loop,*,cleanup=True ):\n assert loop is not None\n \n events.set_event_loop(None )\n if cleanup:\n self.addCleanup(loop.close)\n \n def new_test_loop(self,gen=None ):\n loop=TestLoop(gen)\n self.set_event_loop(loop)\n return loop\n \n def tearDown(self):\n events.set_event_loop(None )\n \n \n \n self.assertEqual(sys.exc_info(),(None ,None ,None ))\n \n \n@contextlib.contextmanager\ndef disable_logger():\n ''\n\n\n \n old_level=logger.level\n try :\n logger.setLevel(logging.CRITICAL+1)\n yield\n finally :\n logger.setLevel(old_level)\n \ndef mock_nonblocking_socket():\n ''\n sock=mock.Mock(socket.socket)\n sock.gettimeout.return_value=0.0\n return sock\n \n \ndef force_legacy_ssl_support():\n return mock.patch('asyncio.sslproto._is_sslproto_available',\n return_value=False )\n"], "crypto_js.rollups.md5": [".js", "/*\nCryptoJS v3.1.2\ncode.google.com/p/crypto-js\n(c) 2009-2013 by Jeff Mott. 
All rights reserved.\ncode.google.com/p/crypto-js/wiki/License\n*/\nvar CryptoJS=CryptoJS||function(s,p){var m={},l=m.lib={},n=function(){},r=l.Base={extend:function(b){n.prototype=this;var h=new n;b&&h.mixIn(b);h.hasOwnProperty(\"init\")||(h.init=function(){h.$super.init.apply(this,arguments)});h.init.prototype=h;h.$super=this;return h},create:function(){var b=this.extend();b.init.apply(b,arguments);return b},init:function(){},mixIn:function(b){for(var h in b)b.hasOwnProperty(h)&&(this[h]=b[h]);b.hasOwnProperty(\"toString\")&&(this.toString=b.toString)},clone:function(){return this.init.prototype.extend(this)}},\nq=l.WordArray=r.extend({init:function(b,h){b=this.words=b||[];this.sigBytes=h!=p?h:4*b.length},toString:function(b){return(b||t).stringify(this)},concat:function(b){var h=this.words,a=b.words,j=this.sigBytes;b=b.sigBytes;this.clamp();if(j%4)for(var g=0;g>>2]|=(a[g>>>2]>>>24-8*(g%4)&255)<<24-8*((j+g)%4);else if(65535>>2]=a[g>>>2];else h.push.apply(h,a);this.sigBytes+=b;return this},clamp:function(){var b=this.words,h=this.sigBytes;b[h>>>2]&=4294967295<<\n32-8*(h%4);b.length=s.ceil(h/4)},clone:function(){var b=r.clone.call(this);b.words=this.words.slice(0);return b},random:function(b){for(var h=[],a=0;a>>2]>>>24-8*(j%4)&255;g.push((k>>>4).toString(16));g.push((k&15).toString(16))}return g.join(\"\")},parse:function(b){for(var a=b.length,g=[],j=0;j>>3]|=parseInt(b.substr(j,\n2),16)<<24-4*(j%8);return new q.init(g,a/2)}},a=v.Latin1={stringify:function(b){var a=b.words;b=b.sigBytes;for(var g=[],j=0;j>>2]>>>24-8*(j%4)&255));return g.join(\"\")},parse:function(b){for(var a=b.length,g=[],j=0;j>>2]|=(b.charCodeAt(j)&255)<<24-8*(j%4);return new q.init(g,a)}},u=v.Utf8={stringify:function(b){try{return decodeURIComponent(escape(a.stringify(b)))}catch(g){throw Error(\"Malformed UTF-8 data\");}},parse:function(b){return a.parse(unescape(encodeURIComponent(b)))}},\ng=l.BufferedBlockAlgorithm=r.extend({reset:function(){this._data=new q.init;this._nDataBytes=0},_append:function(b){\"string\"==typeof b&&(b=u.parse(b));this._data.concat(b);this._nDataBytes+=b.sigBytes},_process:function(b){var a=this._data,g=a.words,j=a.sigBytes,k=this.blockSize,m=j/(4*k),m=b?s.ceil(m):s.max((m|0)-this._minBufferSize,0);b=m*k;j=s.min(4*b,j);if(b){for(var l=0;l>>32-j)+k}function m(a,k,b,h,l,j,m){a=a+(k&h|b&~h)+l+m;return(a<>>32-j)+k}function l(a,k,b,h,l,j,m){a=a+(k^b^h)+l+m;return(a<>>32-j)+k}function n(a,k,b,h,l,j,m){a=a+(b^(k|~h))+l+m;return(a<>>32-j)+k}for(var r=CryptoJS,q=r.lib,v=q.WordArray,t=q.Hasher,q=r.algo,a=[],u=0;64>u;u++)a[u]=4294967296*s.abs(s.sin(u+1))|0;q=q.MD5=t.extend({_doReset:function(){this._hash=new v.init([1732584193,4023233417,2562383102,271733878])},\n_doProcessBlock:function(g,k){for(var b=0;16>b;b++){var h=k+b,w=g[h];g[h]=(w<<8|w>>>24)&16711935|(w<<24|w>>>8)&4278255360}var 
b=this._hash.words,h=g[k+0],w=g[k+1],j=g[k+2],q=g[k+3],r=g[k+4],s=g[k+5],t=g[k+6],u=g[k+7],v=g[k+8],x=g[k+9],y=g[k+10],z=g[k+11],A=g[k+12],B=g[k+13],C=g[k+14],D=g[k+15],c=b[0],d=b[1],e=b[2],f=b[3],c=p(c,d,e,f,h,7,a[0]),f=p(f,c,d,e,w,12,a[1]),e=p(e,f,c,d,j,17,a[2]),d=p(d,e,f,c,q,22,a[3]),c=p(c,d,e,f,r,7,a[4]),f=p(f,c,d,e,s,12,a[5]),e=p(e,f,c,d,t,17,a[6]),d=p(d,e,f,c,u,22,a[7]),\nc=p(c,d,e,f,v,7,a[8]),f=p(f,c,d,e,x,12,a[9]),e=p(e,f,c,d,y,17,a[10]),d=p(d,e,f,c,z,22,a[11]),c=p(c,d,e,f,A,7,a[12]),f=p(f,c,d,e,B,12,a[13]),e=p(e,f,c,d,C,17,a[14]),d=p(d,e,f,c,D,22,a[15]),c=m(c,d,e,f,w,5,a[16]),f=m(f,c,d,e,t,9,a[17]),e=m(e,f,c,d,z,14,a[18]),d=m(d,e,f,c,h,20,a[19]),c=m(c,d,e,f,s,5,a[20]),f=m(f,c,d,e,y,9,a[21]),e=m(e,f,c,d,D,14,a[22]),d=m(d,e,f,c,r,20,a[23]),c=m(c,d,e,f,x,5,a[24]),f=m(f,c,d,e,C,9,a[25]),e=m(e,f,c,d,q,14,a[26]),d=m(d,e,f,c,v,20,a[27]),c=m(c,d,e,f,B,5,a[28]),f=m(f,c,\nd,e,j,9,a[29]),e=m(e,f,c,d,u,14,a[30]),d=m(d,e,f,c,A,20,a[31]),c=l(c,d,e,f,s,4,a[32]),f=l(f,c,d,e,v,11,a[33]),e=l(e,f,c,d,z,16,a[34]),d=l(d,e,f,c,C,23,a[35]),c=l(c,d,e,f,w,4,a[36]),f=l(f,c,d,e,r,11,a[37]),e=l(e,f,c,d,u,16,a[38]),d=l(d,e,f,c,y,23,a[39]),c=l(c,d,e,f,B,4,a[40]),f=l(f,c,d,e,h,11,a[41]),e=l(e,f,c,d,q,16,a[42]),d=l(d,e,f,c,t,23,a[43]),c=l(c,d,e,f,x,4,a[44]),f=l(f,c,d,e,A,11,a[45]),e=l(e,f,c,d,D,16,a[46]),d=l(d,e,f,c,j,23,a[47]),c=n(c,d,e,f,h,6,a[48]),f=n(f,c,d,e,u,10,a[49]),e=n(e,f,c,d,\nC,15,a[50]),d=n(d,e,f,c,s,21,a[51]),c=n(c,d,e,f,A,6,a[52]),f=n(f,c,d,e,q,10,a[53]),e=n(e,f,c,d,y,15,a[54]),d=n(d,e,f,c,w,21,a[55]),c=n(c,d,e,f,v,6,a[56]),f=n(f,c,d,e,D,10,a[57]),e=n(e,f,c,d,t,15,a[58]),d=n(d,e,f,c,B,21,a[59]),c=n(c,d,e,f,r,6,a[60]),f=n(f,c,d,e,z,10,a[61]),e=n(e,f,c,d,j,15,a[62]),d=n(d,e,f,c,x,21,a[63]);b[0]=b[0]+c|0;b[1]=b[1]+d|0;b[2]=b[2]+e|0;b[3]=b[3]+f|0},_doFinalize:function(){var a=this._data,k=a.words,b=8*this._nDataBytes,h=8*a.sigBytes;k[h>>>5]|=128<<24-h%32;var l=s.floor(b/\n4294967296);k[(h+64>>>9<<4)+15]=(l<<8|l>>>24)&16711935|(l<<24|l>>>8)&4278255360;k[(h+64>>>9<<4)+14]=(b<<8|b>>>24)&16711935|(b<<24|b>>>8)&4278255360;a.sigBytes=4*(k.length+1);this._process();a=this._hash;k=a.words;for(b=0;4>b;b++)h=k[b],k[b]=(h<<8|h>>>24)&16711935|(h<<24|h>>>8)&4278255360;return a},clone:function(){var a=t.clone.call(this);a._hash=this._hash.clone();return a}});r.MD5=t._createHelper(q);r.HmacMD5=t._createHmacHelper(q)})(Math);\n"], "csv": [".py", "\n\"\"\"\ncsv.py - read/write/investigate CSV files\n\"\"\"\n\nimport re\nfrom _csv import Error,__version__,writer,reader,register_dialect,unregister_dialect,get_dialect,list_dialects,field_size_limit,QUOTE_MINIMAL,QUOTE_ALL,QUOTE_NONNUMERIC,QUOTE_NONE,__doc__\nfrom _csv import Dialect as _Dialect\n\nfrom io import StringIO\n\n__all__=[\"QUOTE_MINIMAL\",\"QUOTE_ALL\",\"QUOTE_NONNUMERIC\",\"QUOTE_NONE\",\n\"Error\",\"Dialect\",\"__doc__\",\"excel\",\"excel_tab\",\n\"field_size_limit\",\"reader\",\"writer\",\n\"register_dialect\",\"get_dialect\",\"list_dialects\",\"Sniffer\",\n\"unregister_dialect\",\"__version__\",\"DictReader\",\"DictWriter\"]\n\nclass Dialect:\n ''\n\n\n\n\n\n \n _name=\"\"\n _valid=False\n \n delimiter=None\n quotechar=None\n escapechar=None\n doublequote=None\n skipinitialspace=None\n lineterminator=None\n quoting=None\n \n def __init__(self):\n if self.__class__ !=Dialect:\n self._valid=True\n self._validate()\n \n def _validate(self):\n try :\n _Dialect(self)\n except TypeError as e:\n \n raise Error(str(e))\n \nclass excel(Dialect):\n ''\n delimiter=','\n quotechar='\"'\n doublequote=True\n skipinitialspace=False\n lineterminator='\\r\\n'\n 
quoting=QUOTE_MINIMAL\nregister_dialect(\"excel\",excel)\n\nclass excel_tab(excel):\n ''\n delimiter='\\t'\nregister_dialect(\"excel-tab\",excel_tab)\n\nclass unix_dialect(Dialect):\n ''\n delimiter=','\n quotechar='\"'\n doublequote=True\n skipinitialspace=False\n lineterminator='\\n'\n quoting=QUOTE_ALL\nregister_dialect(\"unix\",unix_dialect)\n\n\nclass DictReader:\n def __init__(self,f,fieldnames=None ,restkey=None ,restval=None ,\n dialect=\"excel\",*args,**kwds):\n self._fieldnames=fieldnames\n self.restkey=restkey\n self.restval=restval\n self.reader=reader(f,dialect,*args,**kwds)\n self.dialect=dialect\n self.line_num=0\n \n def __iter__(self):\n return self\n \n @property\n def fieldnames(self):\n if self._fieldnames is None :\n try :\n self._fieldnames=next(self.reader)\n except StopIteration:\n pass\n self.line_num=self.reader.line_num\n return self._fieldnames\n \n @fieldnames.setter\n def fieldnames(self,value):\n self._fieldnames=value\n \n def __next__(self):\n if self.line_num ==0:\n \n self.fieldnames\n row=next(self.reader)\n self.line_num=self.reader.line_num\n \n \n \n \n while row ==[]:\n row=next(self.reader)\n d=dict(zip(self.fieldnames,row))\n lf=len(self.fieldnames)\n lr=len(row)\n if lf lr:\n for key in self.fieldnames[lr:]:\n d[key]=self.restval\n return d\n \n \nclass DictWriter:\n def __init__(self,f,fieldnames,restval=\"\",extrasaction=\"raise\",\n dialect=\"excel\",*args,**kwds):\n self.fieldnames=fieldnames\n self.restval=restval\n if extrasaction.lower()not in (\"raise\",\"ignore\"):\n raise ValueError(\"extrasaction (%s) must be 'raise' or 'ignore'\"\n %extrasaction)\n self.extrasaction=extrasaction\n self.writer=writer(f,dialect,*args,**kwds)\n \n def writeheader(self):\n header=dict(zip(self.fieldnames,self.fieldnames))\n self.writerow(header)\n \n def _dict_to_list(self,rowdict):\n if self.extrasaction ==\"raise\":\n wrong_fields=[k for k in rowdict if k not in self.fieldnames]\n if wrong_fields:\n raise ValueError(\"dict contains fields not in fieldnames: \"\n +\", \".join(wrong_fields))\n return [rowdict.get(key,self.restval)for key in self.fieldnames]\n \n def writerow(self,rowdict):\n return self.writer.writerow(self._dict_to_list(rowdict))\n \n def writerows(self,rowdicts):\n rows=[]\n for rowdict in rowdicts:\n rows.append(self._dict_to_list(rowdict))\n return self.writer.writerows(rows)\n \n \ntry :\n complex\nexcept NameError:\n complex=float\n \nclass Sniffer:\n ''\n\n\n \n def __init__(self):\n \n self.preferred=[',','\\t',';',' ',':']\n \n \n def sniff(self,sample,delimiters=None ):\n ''\n\n \n \n quotechar,doublequote,delimiter,skipinitialspace= self._guess_quote_and_delimiter(sample,delimiters)\n if not delimiter:\n delimiter,skipinitialspace=self._guess_delimiter(sample,\n delimiters)\n \n if not delimiter:\n raise Error(\"Could not determine delimiter\")\n \n class dialect(Dialect):\n _name=\"sniffed\"\n lineterminator='\\r\\n'\n quoting=QUOTE_MINIMAL\n \n \n dialect.doublequote=doublequote\n dialect.delimiter=delimiter\n \n dialect.quotechar=quotechar or'\"'\n dialect.skipinitialspace=skipinitialspace\n \n return dialect\n \n \n def _guess_quote_and_delimiter(self,data,delimiters):\n ''\n\n\n\n\n\n\n\n\n \n \n matches=[]\n for restr in ('(?P[^\\w\\n\"\\'])(?P ?)(?P[\"\\']).*?(?P=quote)(?P=delim)',\n '(?:^|\\n)(?P[\"\\']).*?(?P=quote)(?P[^\\w\\n\"\\'])(?P ?)',\n '(?P>[^\\w\\n\"\\'])(?P ?)(?P[\"\\']).*?(?P=quote)(?:$|\\n)',\n '(?:^|\\n)(?P[\"\\']).*?(?P=quote)(?:$|\\n)'):\n regexp=re.compile(restr,re.DOTALL |re.MULTILINE)\n 
matches=regexp.findall(data)\n if matches:\n break\n \n if not matches:\n \n return ('',False ,None ,0)\n quotes={}\n delims={}\n spaces=0\n for m in matches:\n n=regexp.groupindex['quote']-1\n key=m[n]\n if key:\n quotes[key]=quotes.get(key,0)+1\n try :\n n=regexp.groupindex['delim']-1\n key=m[n]\n except KeyError:\n continue\n if key and (delimiters is None or key in delimiters):\n delims[key]=delims.get(key,0)+1\n try :\n n=regexp.groupindex['space']-1\n except KeyError:\n continue\n if m[n]:\n spaces +=1\n \n quotechar=max(quotes,key=quotes.get)\n \n if delims:\n delim=max(delims,key=delims.get)\n skipinitialspace=delims[delim]==spaces\n if delim =='\\n':\n delim=''\n else :\n \n delim=''\n skipinitialspace=0\n \n \n \n dq_regexp=re.compile(\n r\"((%(delim)s)|^)\\W*%(quote)s[^%(delim)s\\n]*%(quote)s[^%(delim)s\\n]*%(quote)s\\W*((%(delim)s)|$)\"% {'delim':re.escape(delim),'quote':quotechar},re.MULTILINE)\n \n \n \n if dq_regexp.search(data):\n doublequote=True\n else :\n doublequote=False\n \n return (quotechar,doublequote,delim,skipinitialspace)\n \n \n def _guess_delimiter(self,data,delimiters):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n data=list(filter(None ,data.split('\\n')))\n \n ascii=[chr(c)for c in range(127)]\n \n \n chunkLength=min(10,len(data))\n iteration=0\n charFrequency={}\n modes={}\n delims={}\n start,end=0,min(chunkLength,len(data))\n while start 1:\n modes[char]=max(items,key=lambda x:x[1])\n \n \n items.remove(modes[char])\n modes[char]=(modes[char][0],modes[char][1]\n -sum(item[1]for item in items))\n else :\n modes[char]=items[0]\n \n \n modeList=modes.items()\n total=float(chunkLength *iteration)\n \n consistency=1.0\n \n threshold=0.9\n while len(delims)==0 and consistency >=threshold:\n for k,v in modeList:\n if v[0]>0 and v[1]>0:\n if ((v[1]/total)>=consistency and\n (delimiters is None or k in delimiters)):\n delims[k]=v\n consistency -=0.01\n \n if len(delims)==1:\n delim=list(delims.keys())[0]\n skipinitialspace=(data[0].count(delim)==\n data[0].count(\"%c \"%delim))\n return (delim,skipinitialspace)\n \n \n start=end\n end +=chunkLength\n \n if not delims:\n return ('',0)\n \n \n if len(delims)>1:\n for d in self.preferred:\n if d in delims.keys():\n skipinitialspace=(data[0].count(d)==\n data[0].count(\"%c \"%d))\n return (d,skipinitialspace)\n \n \n \n items=[(v,k)for (k,v)in delims.items()]\n items.sort()\n delim=items[-1][1]\n \n skipinitialspace=(data[0].count(delim)==\n data[0].count(\"%c \"%delim))\n return (delim,skipinitialspace)\n \n \n def has_header(self,sample):\n \n \n \n \n \n \n \n \n \n rdr=reader(StringIO(sample),self.sniff(sample))\n \n header=next(rdr)\n \n columns=len(header)\n columnTypes={}\n for i in range(columns):columnTypes[i]=None\n \n checked=0\n for row in rdr:\n \n if checked >20:\n break\n checked +=1\n \n if len(row)!=columns:\n continue\n \n for col in list(columnTypes.keys()):\n \n for thisType in [int,float,complex]:\n try :\n thisType(row[col])\n break\n except (ValueError,OverflowError):\n pass\n else :\n \n thisType=len(row[col])\n \n if thisType !=columnTypes[col]:\n if columnTypes[col]is None :\n columnTypes[col]=thisType\n else :\n \n \n del columnTypes[col]\n \n \n \n hasHeader=0\n for col,colType in columnTypes.items():\n if type(colType)==type(0):\n if len(header[col])!=colType:\n hasHeader +=1\n else :\n hasHeader -=1\n else :\n try :\n colType(header[col])\n except (ValueError,TypeError):\n hasHeader +=1\n else :\n hasHeader -=1\n \n return hasHeader >0\n"], "string": [".py", 
"''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\nimport _string\n\n\nwhitespace=' \\t\\n\\r\\v\\f'\nascii_lowercase='abcdefghijklmnopqrstuvwxyz'\nascii_uppercase='ABCDEFGHIJKLMNOPQRSTUVWXYZ'\nascii_letters=ascii_lowercase+ascii_uppercase\ndigits='0123456789'\nhexdigits=digits+'abcdef'+'ABCDEF'\noctdigits='01234567'\npunctuation=\"\"\"!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~\"\"\"\nprintable=digits+ascii_letters+punctuation+whitespace\n\n\n\n\ndef capwords(s,sep=None ):\n ''\n\n\n\n\n\n\n\n\n \n return (sep or' ').join(x.capitalize()for x in s.split(sep))\n \n \n \nimport re as _re\nfrom collections import ChainMap\n\nclass _TemplateMetaclass(type):\n pattern=r\"\"\"\n %(delim)s(?:\n (?P%(delim)s) | # Escape sequence of two delimiters\n (?P%(id)s) | # delimiter and a Python identifier\n {(?P%(id)s)} | # delimiter and a braced identifier\n (?P) # Other ill-formed delimiter exprs\n )\n \"\"\"\n \n def __init__(cls,name,bases,dct):\n super(_TemplateMetaclass,cls).__init__(name,bases,dct)\n if'pattern'in dct:\n pattern=cls.pattern\n else :\n pattern=_TemplateMetaclass.pattern %{\n 'delim':_re.escape(cls.delimiter),\n 'id':cls.idpattern,\n }\n cls.pattern=_re.compile(pattern,cls.flags |_re.VERBOSE)\n \n \nclass Template(metaclass=_TemplateMetaclass):\n ''\n \n delimiter='$'\n idpattern=r'[_a-z][_a-z0-9]*'\n flags=_re.IGNORECASE\n \n def __init__(self,template):\n self.template=template\n \n \n \n def _invalid(self,mo):\n i=mo.start('invalid')\n lines=self.template[:i].splitlines(keepends=True )\n if not lines:\n colno=1\n lineno=1\n else :\n colno=i -len(''.join(lines[:-1]))\n lineno=len(lines)\n raise ValueError('Invalid placeholder in string: line %d, col %d'%\n (lineno,colno))\n \n def substitute(self,*args,**kws):\n if len(args)>1:\n raise TypeError('Too many positional arguments')\n if not args:\n mapping=kws\n elif kws:\n mapping=ChainMap(kws,args[0])\n else :\n mapping=args[0]\n \n def convert(mo):\n \n named=mo.group('named')or mo.group('braced')\n if named is not None :\n val=mapping[named]\n \n \n return'%s'%(val,)\n if mo.group('escaped')is not None :\n return self.delimiter\n if mo.group('invalid')is not None :\n self._invalid(mo)\n raise ValueError('Unrecognized named group in pattern',\n self.pattern)\n return self.pattern.sub(convert,self.template)\n \n def safe_substitute(self,*args,**kws):\n if len(args)>1:\n raise TypeError('Too many positional arguments')\n if not args:\n mapping=kws\n elif kws:\n mapping=ChainMap(kws,args[0])\n else :\n mapping=args[0]\n \n def convert(mo):\n named=mo.group('named')or mo.group('braced')\n if named is not None :\n try :\n \n \n return'%s'%(mapping[named],)\n except KeyError:\n return mo.group()\n if mo.group('escaped')is not None :\n return self.delimiter\n if mo.group('invalid')is not None :\n return mo.group()\n raise ValueError('Unrecognized named group in pattern',\n self.pattern)\n return self.pattern.sub(convert,self.template)\n \n \n \n \n \n \n \n \n \n \n \n \n \nclass Formatter:\n def format(self,format_string,*args,**kwargs):\n return format_string.format(*args,**kwargs)\n"], "_io": [".py", "''\n\n\n\nimport os\nimport abc\nimport codecs\nimport errno\n\ntry :\n from _thread import allocate_lock as Lock\nexcept ImportError:\n from _dummy_thread import allocate_lock as Lock\n \nimport io\n\n\nSEEK_SET=0\nSEEK_CUR=1\nSEEK_END=2\n\nvalid_seek_flags={0,1,2}\nif hasattr(os,'SEEK_HOLE'):\n valid_seek_flags.add(os.SEEK_HOLE)\n valid_seek_flags.add(os.SEEK_DATA)\n \n \nDEFAULT_BUFFER_SIZE=8 *1024\n\n\n\n\n\n\nBlockingIOError=BlockingIOError\n\n\ndef 
__open(file,mode=\"r\",buffering=-1,encoding=None ,errors=None ,\nnewline=None ,closefd=True ,opener=None ):\n\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if not isinstance(file,(str,bytes,int)):\n raise TypeError(\"invalid file: %r\"%file)\n if not isinstance(mode,str):\n raise TypeError(\"invalid mode: %r\"%mode)\n if not isinstance(buffering,int):\n raise TypeError(\"invalid buffering: %r\"%buffering)\n if encoding is not None and not isinstance(encoding,str):\n raise TypeError(\"invalid encoding: %r\"%encoding)\n if errors is not None and not isinstance(errors,str):\n raise TypeError(\"invalid errors: %r\"%errors)\n modes=set(mode)\n if modes -set(\"axrwb+tU\")or len(mode)>len(modes):\n raise ValueError(\"invalid mode: %r\"%mode)\n creating=\"x\"in modes\n reading=\"r\"in modes\n writing=\"w\"in modes\n appending=\"a\"in modes\n updating=\"+\"in modes\n text=\"t\"in modes\n binary=\"b\"in modes\n if\"U\"in modes:\n if creating or writing or appending:\n raise ValueError(\"can't use U and writing mode at once\")\n reading=True\n if text and binary:\n raise ValueError(\"can't have text and binary mode at once\")\n if creating+reading+writing+appending >1:\n raise ValueError(\"can't have read/write/append mode at once\")\n if not (creating or reading or writing or appending):\n raise ValueError(\"must have exactly one of read/write/append mode\")\n if binary and encoding is not None :\n raise ValueError(\"binary mode doesn't take an encoding argument\")\n if binary and errors is not None :\n raise ValueError(\"binary mode doesn't take an errors argument\")\n if binary and newline is not None :\n raise ValueError(\"binary mode doesn't take a newline argument\")\n raw=FileIO(file,\n (creating and\"x\"or\"\")+\n (reading and\"r\"or\"\")+\n (writing and\"w\"or\"\")+\n (appending and\"a\"or\"\")+\n (updating and\"+\"or\"\"),\n closefd,opener=opener)\n line_buffering=False\n if buffering ==1 or buffering <0 and raw.isatty():\n buffering=-1\n line_buffering=True\n if buffering <0:\n buffering=DEFAULT_BUFFER_SIZE\n try :\n bs=os.fstat(raw.fileno()).st_blksize\n except (os.error,AttributeError):\n pass\n else :\n if bs >1:\n buffering=bs\n if buffering <0:\n raise ValueError(\"invalid buffering size\")\n if buffering ==0:\n if binary:\n return raw\n raise ValueError(\"can't have unbuffered text I/O\")\n if updating:\n buffer=BufferedRandom(raw,buffering)\n elif creating or writing or appending:\n buffer=BufferedWriter(raw,buffering)\n elif reading:\n buffer=BufferedReader(raw,buffering)\n else :\n raise ValueError(\"unknown mode: %r\"%mode)\n if binary:\n return buffer\n text=TextIOWrapper(buffer,encoding,errors,newline,line_buffering)\n text.mode=mode\n return text\n \n \nclass DocDescriptor:\n ''\n \n def __get__(self,obj,typ):\n return (\n \"open(file, mode='r', buffering=-1, encoding=None, \"\n \"errors=None, newline=None, closefd=True)\\n\\n\"+\n open.__doc__)\n \nclass OpenWrapper:\n ''\n\n\n\n\n\n \n __doc__=DocDescriptor()\n \n def __new__(cls,*args,**kwargs):\n return open(*args,**kwargs)\n \n \n \n \ntry :\n UnsupportedOperation=io.UnsupportedOperation\nexcept AttributeError:\n class UnsupportedOperation(ValueError,IOError):\n pass\n \n \nclass IOBase(metaclass=abc.ABCMeta):\n\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n \n \n def _unsupported(self,name):\n ''\n raise 
UnsupportedOperation(\"%s.%s() not supported\"%\n (self.__class__.__name__,name))\n \n \n \n def seek(self,pos,whence=0):\n ''\n\n\n\n\n\n\n\n\n\n\n\n \n self._unsupported(\"seek\")\n \n def tell(self):\n ''\n return self.seek(0,1)\n \n def truncate(self,pos=None ):\n ''\n\n\n\n \n self._unsupported(\"truncate\")\n \n \n \n def flush(self):\n ''\n\n\n \n self._checkClosed()\n \n \n __closed=False\n \n def close(self):\n ''\n\n\n \n if not self.__closed:\n try :\n self.flush()\n finally :\n self.__closed=True\n \n def __del__(self):\n ''\n \n \n \n \n \n try :\n self.close()\n except :\n pass\n \n \n \n def seekable(self):\n ''\n\n\n\n \n return False\n \n def _checkSeekable(self,msg=None ):\n ''\n \n if not self.seekable():\n raise UnsupportedOperation(\"File or stream is not seekable.\"\n if msg is None else msg)\n \n def readable(self):\n ''\n\n\n \n return False\n \n def _checkReadable(self,msg=None ):\n ''\n \n if not self.readable():\n raise UnsupportedOperation(\"File or stream is not readable.\"\n if msg is None else msg)\n \n def writable(self):\n ''\n\n\n \n return False\n \n def _checkWritable(self,msg=None ):\n ''\n \n if not self.writable():\n raise UnsupportedOperation(\"File or stream is not writable.\"\n if msg is None else msg)\n \n @property\n def closed(self):\n ''\n\n\n \n return self.__closed\n \n def _checkClosed(self,msg=None ):\n ''\n \n if self.closed:\n raise ValueError(\"I/O operation on closed file.\"\n if msg is None else msg)\n \n \n \n def __enter__(self):\n ''\n self._checkClosed()\n return self\n \n def __exit__(self,*args):\n ''\n self.close()\n \n \n \n \n \n def fileno(self):\n ''\n\n\n \n self._unsupported(\"fileno\")\n \n def isatty(self):\n ''\n\n\n \n self._checkClosed()\n return False\n \n \n \n def readline(self,limit=-1):\n ''\n\n\n\n\n\n\n\n \n \n if hasattr(self,\"peek\"):\n def nreadahead():\n readahead=self.peek(1)\n if not readahead:\n return 1\n n=(readahead.find(b\"\\n\")+1)or len(readahead)\n if limit >=0:\n n=min(n,limit)\n return n\n else :\n def nreadahead():\n return 1\n if limit is None :\n limit=-1\n elif not isinstance(limit,int):\n raise TypeError(\"limit must be an integer\")\n res=bytearray()\n while limit <0 or len(res)=hint:\n break\n return lines\n \n def writelines(self,lines):\n self._checkClosed()\n for line in lines:\n self.write(line)\n \n \n \n \n \nclass RawIOBase(IOBase):\n\n ''\n \n \n \n \n \n \n \n \n \n \n \n def read(self,n=-1):\n ''\n\n\n\n \n if n is None :\n n=-1\n if n <0:\n return self.readall()\n b=bytearray(n.__index__())\n n=self.readinto(b)\n if n is None :\n return None\n del b[n:]\n return bytes(b)\n \n def readall(self):\n ''\n res=bytearray()\n while True :\n data=self.read(DEFAULT_BUFFER_SIZE)\n if not data:\n break\n res +=data\n if res:\n return bytes(res)\n else :\n \n return data\n \n def readinto(self,b):\n ''\n\n\n\n \n self._unsupported(\"readinto\")\n \n def write(self,b):\n ''\n\n\n \n self._unsupported(\"write\")\n \n \n \n \n \n \n \nclass BufferedIOBase(IOBase):\n\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n def read(self,n=None ):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n self._unsupported(\"read\")\n \n def read1(self,n=None ):\n ''\n\n \n self._unsupported(\"read1\")\n \n def readinto(self,b):\n ''\n\n\n\n\n\n\n\n\n \n \n data=self.read(len(b))\n n=len(data)\n try :\n b[:n]=data\n except TypeError as err:\n import array\n if not isinstance(b,array.array):\n raise err\n b[:n]=array.array('b',data)\n return n\n \n def write(self,b):\n ''\n\n\n\n\n\n\n \n self._unsupported(\"write\")\n \n def 
detach(self):\n ''\n\n\n\n\n \n self._unsupported(\"detach\")\n \n \n \n \n \nclass _BufferedIOMixin(BufferedIOBase):\n\n ''\n\n\n\n\n \n \n def __init__(self,raw):\n self._raw=raw\n \n \n \n def seek(self,pos,whence=0):\n new_position=self.raw.seek(pos,whence)\n if new_position <0:\n raise IOError(\"seek() returned an invalid position\")\n return new_position\n \n def tell(self):\n pos=self.raw.tell()\n if pos <0:\n raise IOError(\"tell() returned an invalid position\")\n return pos\n \n def truncate(self,pos=None ):\n \n \n \n self.flush()\n \n if pos is None :\n pos=self.tell()\n \n \n return self.raw.truncate(pos)\n \n \n \n def flush(self):\n if self.closed:\n raise ValueError(\"flush of closed file\")\n self.raw.flush()\n \n def close(self):\n if self.raw is not None and not self.closed:\n try :\n \n self.flush()\n finally :\n self.raw.close()\n \n def detach(self):\n if self.raw is None :\n raise ValueError(\"raw stream already detached\")\n self.flush()\n raw=self._raw\n self._raw=None\n return raw\n \n \n \n def seekable(self):\n return self.raw.seekable()\n \n def readable(self):\n return self.raw.readable()\n \n def writable(self):\n return self.raw.writable()\n \n @property\n def raw(self):\n return self._raw\n \n @property\n def closed(self):\n return self.raw.closed\n \n @property\n def name(self):\n return self.raw.name\n \n @property\n def mode(self):\n return self.raw.mode\n \n def __getstate__(self):\n raise TypeError(\"can not serialize a '{0}' object\"\n .format(self.__class__.__name__))\n \n def __repr__(self):\n clsname=self.__class__.__name__\n try :\n name=self.name\n except AttributeError:\n return\"<_io.{0}>\".format(clsname)\n else :\n return\"<_io.{0} name={1!r}>\".format(clsname,name)\n \n \n \n def fileno(self):\n return self.raw.fileno()\n \n def isatty(self):\n return self.raw.isatty()\n \n \nclass BytesIO(BufferedIOBase):\n\n ''\n \n def __init__(self,initial_bytes=None ):\n buf=bytearray()\n if initial_bytes is not None :\n buf +=initial_bytes\n self._buffer=buf\n self._pos=0\n \n def __getstate__(self):\n if self.closed:\n raise ValueError(\"__getstate__ on closed file\")\n return self.__dict__.copy()\n \n def getvalue(self):\n ''\n \n if self.closed:\n raise ValueError(\"getvalue on closed file\")\n return bytes(self._buffer)\n \n def getbuffer(self):\n ''\n \n return memoryview(self._buffer)\n \n def read(self,n=None ):\n if self.closed:\n raise ValueError(\"read from closed file\")\n if n is None :\n n=-1\n if n <0:\n n=len(self._buffer)\n if len(self._buffer)<=self._pos:\n return b\"\"\n newpos=min(len(self._buffer),self._pos+n)\n b=self._buffer[self._pos:newpos]\n self._pos=newpos\n return bytes(b)\n \n def read1(self,n):\n ''\n \n return self.read(n)\n \n def write(self,b):\n if self.closed:\n raise ValueError(\"write to closed file\")\n if isinstance(b,str):\n raise TypeError(\"can't write str to binary stream\")\n n=len(b)\n if n ==0:\n return 0\n pos=self._pos\n if pos >len(self._buffer):\n \n \n padding=b'\\x00'*(pos -len(self._buffer))\n self._buffer +=padding\n self._buffer[pos:pos+n]=b\n self._pos +=n\n return n\n \n def seek(self,pos,whence=0):\n if self.closed:\n raise ValueError(\"seek on closed file\")\n try :\n pos.__index__\n except AttributeError as err:\n raise TypeError(\"an integer is required\")from err\n if whence ==0:\n if pos <0:\n raise ValueError(\"negative seek position %r\"%(pos,))\n self._pos=pos\n elif whence ==1:\n self._pos=max(0,self._pos+pos)\n elif whence ==2:\n self._pos=max(0,len(self._buffer)+pos)\n else :\n raise 
ValueError(\"unsupported whence value\")\n return self._pos\n \n def tell(self):\n if self.closed:\n raise ValueError(\"tell on closed file\")\n return self._pos\n \n def truncate(self,pos=None ):\n if self.closed:\n raise ValueError(\"truncate on closed file\")\n if pos is None :\n pos=self._pos\n else :\n try :\n pos.__index__\n except AttributeError as err:\n raise TypeError(\"an integer is required\")from err\n if pos <0:\n raise ValueError(\"negative truncate position %r\"%(pos,))\n del self._buffer[pos:]\n return pos\n \n def readable(self):\n if self.closed:\n raise ValueError(\"I/O operation on closed file.\")\n return True\n \n def writable(self):\n if self.closed:\n raise ValueError(\"I/O operation on closed file.\")\n return True\n \n def seekable(self):\n if self.closed:\n raise ValueError(\"I/O operation on closed file.\")\n return True\n \n \nclass BufferedReader(_BufferedIOMixin):\n\n ''\n\n\n\n\n\n\n \n \n def __init__(self,raw,buffer_size=DEFAULT_BUFFER_SIZE):\n ''\n \n if not raw.readable():\n raise IOError('\"raw\" argument must be readable.')\n \n _BufferedIOMixin.__init__(self,raw)\n if buffer_size <=0:\n raise ValueError(\"invalid buffer size\")\n self.buffer_size=buffer_size\n self._reset_read_buf()\n self._read_lock=Lock()\n \n def _reset_read_buf(self):\n self._read_buf=b\"\"\n self._read_pos=0\n \n def read(self,n=None ):\n ''\n\n\n\n\n\n \n if n is not None and n <-1:\n raise ValueError(\"invalid number of bytes to read\")\n with self._read_lock:\n return self._read_unlocked(n)\n \n def _read_unlocked(self,n=None ):\n nodata_val=b\"\"\n empty_values=(b\"\",None )\n buf=self._read_buf\n pos=self._read_pos\n \n \n if n is None or n ==-1:\n self._reset_read_buf()\n if hasattr(self.raw,'readall'):\n chunk=self.raw.readall()\n if chunk is None :\n return buf[pos:]or None\n else :\n return buf[pos:]+chunk\n chunks=[buf[pos:]]\n current_size=0\n while True :\n \n try :\n chunk=self.raw.read()\n except InterruptedError:\n continue\n if chunk in empty_values:\n nodata_val=chunk\n break\n current_size +=len(chunk)\n chunks.append(chunk)\n return b\"\".join(chunks)or nodata_val\n \n \n avail=len(buf)-pos\n if n <=avail:\n \n self._read_pos +=n\n return buf[pos:pos+n]\n \n \n chunks=[buf[pos:]]\n wanted=max(self.buffer_size,n)\n while avail self.buffer_size:\n \n \n self._flush_unlocked()\n before=len(self._write_buf)\n self._write_buf.extend(b)\n written=len(self._write_buf)-before\n if len(self._write_buf)>self.buffer_size:\n try :\n self._flush_unlocked()\n except BlockingIOError as e:\n if len(self._write_buf)>self.buffer_size:\n \n \n overage=len(self._write_buf)-self.buffer_size\n written -=overage\n self._write_buf=self._write_buf[:self.buffer_size]\n raise BlockingIOError(e.errno,e.strerror,written)\n return written\n \n def truncate(self,pos=None ):\n with self._write_lock:\n self._flush_unlocked()\n if pos is None :\n pos=self.raw.tell()\n return self.raw.truncate(pos)\n \n def flush(self):\n with self._write_lock:\n self._flush_unlocked()\n \n def _flush_unlocked(self):\n if self.closed:\n raise ValueError(\"flush of closed file\")\n while self._write_buf:\n try :\n n=self.raw.write(self._write_buf)\n except InterruptedError:\n continue\n except BlockingIOError:\n raise RuntimeError(\"self.raw should implement RawIOBase: it \"\n \"should not raise BlockingIOError\")\n if n is None :\n raise BlockingIOError(\n errno.EAGAIN,\n \"write could not complete without blocking\",0)\n if n >len(self._write_buf)or n <0:\n raise IOError(\"write() returned incorrect number of 
bytes\")\n del self._write_buf[:n]\n \n def tell(self):\n return _BufferedIOMixin.tell(self)+len(self._write_buf)\n \n def seek(self,pos,whence=0):\n if whence not in valid_seek_flags:\n raise ValueError(\"invalid whence value\")\n with self._write_lock:\n self._flush_unlocked()\n return _BufferedIOMixin.seek(self,pos,whence)\n \n \nclass BufferedRWPair(BufferedIOBase):\n\n ''\n\n\n\n\n\n\n\n\n \n \n \n \n \n def __init__(self,reader,writer,buffer_size=DEFAULT_BUFFER_SIZE):\n ''\n\n\n \n if not reader.readable():\n raise IOError('\"reader\" argument must be readable.')\n \n if not writer.writable():\n raise IOError('\"writer\" argument must be writable.')\n \n self.reader=BufferedReader(reader,buffer_size)\n self.writer=BufferedWriter(writer,buffer_size)\n \n def read(self,n=None ):\n if n is None :\n n=-1\n return self.reader.read(n)\n \n def readinto(self,b):\n return self.reader.readinto(b)\n \n def write(self,b):\n return self.writer.write(b)\n \n def peek(self,n=0):\n return self.reader.peek(n)\n \n def read1(self,n):\n return self.reader.read1(n)\n \n def readable(self):\n return self.reader.readable()\n \n def writable(self):\n return self.writer.writable()\n \n def flush(self):\n return self.writer.flush()\n \n def close(self):\n self.writer.close()\n self.reader.close()\n \n def isatty(self):\n return self.reader.isatty()or self.writer.isatty()\n \n @property\n def closed(self):\n return self.writer.closed\n \n \nclass BufferedRandom(BufferedWriter,BufferedReader):\n\n ''\n\n\n\n\n \n \n def __init__(self,raw,buffer_size=DEFAULT_BUFFER_SIZE):\n raw._checkSeekable()\n BufferedReader.__init__(self,raw,buffer_size)\n BufferedWriter.__init__(self,raw,buffer_size)\n \n def seek(self,pos,whence=0):\n if whence not in valid_seek_flags:\n raise ValueError(\"invalid whence value\")\n self.flush()\n if self._read_buf:\n \n with self._read_lock:\n self.raw.seek(self._read_pos -len(self._read_buf),1)\n \n \n pos=self.raw.seek(pos,whence)\n with self._read_lock:\n self._reset_read_buf()\n if pos <0:\n raise IOError(\"seek() returned invalid position\")\n return pos\n \n def tell(self):\n if self._write_buf:\n return BufferedWriter.tell(self)\n else :\n return BufferedReader.tell(self)\n \n def truncate(self,pos=None ):\n if pos is None :\n pos=self.tell()\n \n return BufferedWriter.truncate(self,pos)\n \n def read(self,n=None ):\n if n is None :\n n=-1\n self.flush()\n return BufferedReader.read(self,n)\n \n def readinto(self,b):\n self.flush()\n return BufferedReader.readinto(self,b)\n \n def peek(self,n=0):\n self.flush()\n return BufferedReader.peek(self,n)\n \n def read1(self,n):\n self.flush()\n return BufferedReader.read1(self,n)\n \n def write(self,b):\n if self._read_buf:\n \n with self._read_lock:\n self.raw.seek(self._read_pos -len(self._read_buf),1)\n self._reset_read_buf()\n return BufferedWriter.write(self,b)\n \n \nclass TextIOBase(IOBase):\n\n ''\n\n\n\n\n \n \n def read(self,n=-1):\n ''\n\n\n\n\n\n \n self._unsupported(\"read\")\n \n def write(self,s):\n ''\n self._unsupported(\"write\")\n \n def truncate(self,pos=None ):\n ''\n self._unsupported(\"truncate\")\n \n def readline(self):\n ''\n\n\n \n self._unsupported(\"readline\")\n \n def detach(self):\n ''\n\n\n\n\n \n self._unsupported(\"detach\")\n \n @property\n def encoding(self):\n ''\n return None\n \n @property\n def newlines(self):\n ''\n\n\n\n\n \n return None\n \n @property\n def errors(self):\n ''\n\n \n return None\n \n \n \n \n \nclass IncrementalNewlineDecoder(codecs.IncrementalDecoder):\n ''\n\n\n\n\n \n def 
__init__(self,decoder,translate,errors='strict'):\n codecs.IncrementalDecoder.__init__(self,errors=errors)\n self.translate=translate\n self.decoder=decoder\n self.seennl=0\n self.pendingcr=False\n \n def decode(self,input,final=False ):\n \n if self.decoder is None :\n output=input\n else :\n output=self.decoder.decode(input,final=final)\n if self.pendingcr and (output or final):\n output=\"\\r\"+output\n self.pendingcr=False\n \n \n \n if output.endswith(\"\\r\")and not final:\n output=output[:-1]\n self.pendingcr=True\n \n \n crlf=output.count('\\r\\n')\n cr=output.count('\\r')-crlf\n lf=output.count('\\n')-crlf\n self.seennl |=(lf and self._LF)|(cr and self._CR) |(crlf and self._CRLF)\n \n if self.translate:\n if crlf:\n output=output.replace(\"\\r\\n\",\"\\n\")\n if cr:\n output=output.replace(\"\\r\",\"\\n\")\n \n return output\n \n def getstate(self):\n if self.decoder is None :\n buf=b\"\"\n flag=0\n else :\n buf,flag=self.decoder.getstate()\n flag <<=1\n if self.pendingcr:\n flag |=1\n return buf,flag\n \n def setstate(self,state):\n buf,flag=state\n self.pendingcr=bool(flag&1)\n if self.decoder is not None :\n self.decoder.setstate((buf,flag >>1))\n \n def reset(self):\n self.seennl=0\n self.pendingcr=False\n if self.decoder is not None :\n self.decoder.reset()\n \n _LF=1\n _CR=2\n _CRLF=4\n \n @property\n def newlines(self):\n return (None ,\n \"\\n\",\n \"\\r\",\n (\"\\r\",\"\\n\"),\n \"\\r\\n\",\n (\"\\n\",\"\\r\\n\"),\n (\"\\r\",\"\\r\\n\"),\n (\"\\r\",\"\\n\",\"\\r\\n\")\n )[self.seennl]\n \n \nclass TextIOWrapper(TextIOBase):\n\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n _CHUNK_SIZE=2048\n \n \n \n \n def __init__(self,buffer,encoding=None ,errors=None ,newline=None ,\n line_buffering=False ,write_through=False ):\n if newline is not None and not isinstance(newline,str):\n raise TypeError(\"illegal newline type: %r\"%(type(newline),))\n if newline not in (None ,\"\",\"\\n\",\"\\r\",\"\\r\\n\"):\n raise ValueError(\"illegal newline value: %r\"%(newline,))\n if encoding is None :\n try :\n encoding=os.device_encoding(buffer.fileno())\n except (AttributeError,UnsupportedOperation):\n pass\n if encoding is None :\n try :\n import locale\n except ImportError:\n \n encoding=\"ascii\"\n else :\n encoding=locale.getpreferredencoding(False )\n \n if not isinstance(encoding,str):\n raise ValueError(\"invalid encoding: %r\"%encoding)\n \n if errors is None :\n errors=\"strict\"\n else :\n if not isinstance(errors,str):\n raise ValueError(\"invalid errors: %r\"%errors)\n \n self._buffer=buffer\n self._line_buffering=line_buffering\n self._encoding=encoding\n self._errors=errors\n self._readuniversal=not newline\n self._readtranslate=newline is None\n self._readnl=newline\n self._writetranslate=newline !=''\n self._writenl=newline or os.linesep\n self._encoder=None\n self._decoder=None\n self._decoded_chars=''\n self._decoded_chars_used=0\n self._snapshot=None\n self._seekable=self._telling=self.buffer.seekable()\n self._has_read1=hasattr(self.buffer,'read1')\n self._b2cratio=0.0\n \n if self._seekable and self.writable():\n position=self.buffer.tell()\n if position !=0:\n try :\n self._get_encoder().setstate(0)\n except LookupError:\n \n pass\n \n \n \n \n \n \n \n \n \n \n def __repr__(self):\n result=\"<_io.TextIOWrapper\"\n try :\n name=self.name\n except AttributeError:\n pass\n else :\n result +=\" name={0!r}\".format(name)\n try :\n mode=self.mode\n except AttributeError:\n pass\n else :\n result +=\" mode={0!r}\".format(mode)\n return result+\" 
encoding={0!r}>\".format(self.encoding)\n \n @property\n def encoding(self):\n return self._encoding\n \n @property\n def errors(self):\n return self._errors\n \n @property\n def line_buffering(self):\n return self._line_buffering\n \n @property\n def buffer(self):\n return self._buffer\n \n def seekable(self):\n if self.closed:\n raise ValueError(\"I/O operation on closed file.\")\n return self._seekable\n \n def readable(self):\n return self.buffer.readable()\n \n def writable(self):\n return self.buffer.writable()\n \n def flush(self):\n self.buffer.flush()\n self._telling=self._seekable\n \n def close(self):\n if self.buffer is not None and not self.closed:\n try :\n self.flush()\n finally :\n self.buffer.close()\n \n @property\n def closed(self):\n return self.buffer.closed\n \n @property\n def name(self):\n return self.buffer.name\n \n def fileno(self):\n return self.buffer.fileno()\n \n def isatty(self):\n return self.buffer.isatty()\n \n def write(self,s):\n ''\n if self.closed:\n raise ValueError(\"write to closed file\")\n if not isinstance(s,str):\n raise TypeError(\"can't write %s to text stream\"%\n s.__class__.__name__)\n length=len(s)\n haslf=(self._writetranslate or self._line_buffering)and\"\\n\"in s\n if haslf and self._writetranslate and self._writenl !=\"\\n\":\n s=s.replace(\"\\n\",self._writenl)\n encoder=self._encoder or self._get_encoder()\n \n b=encoder.encode(s)\n self.buffer.write(b)\n if self._line_buffering and (haslf or\"\\r\"in s):\n self.flush()\n self._snapshot=None\n if self._decoder:\n self._decoder.reset()\n return length\n \n def _get_encoder(self):\n make_encoder=codecs.getincrementalencoder(self._encoding)\n self._encoder=make_encoder(self._errors)\n return self._encoder\n \n def _get_decoder(self):\n make_decoder=codecs.getincrementaldecoder(self._encoding)\n decoder=make_decoder(self._errors)\n if self._readuniversal:\n decoder=IncrementalNewlineDecoder(decoder,self._readtranslate)\n self._decoder=decoder\n return decoder\n \n \n \n \n def _set_decoded_chars(self,chars):\n ''\n self._decoded_chars=chars\n self._decoded_chars_used=0\n \n def _get_decoded_chars(self,n=None ):\n ''\n offset=self._decoded_chars_used\n if n is None :\n chars=self._decoded_chars[offset:]\n else :\n chars=self._decoded_chars[offset:offset+n]\n self._decoded_chars_used +=len(chars)\n return chars\n \n def _rewind_decoded_chars(self,n):\n ''\n if self._decoded_chars_used 0:\n decoder.setstate((b'',dec_flags))\n \n n=len(decoder.decode(next_input[:skip_bytes]))\n if n <=chars_to_skip:\n b,d=decoder.getstate()\n if not b:\n \n dec_flags=d\n chars_to_skip -=n\n break\n \n skip_bytes -=len(b)\n skip_back=1\n else :\n \n skip_bytes -=skip_back\n skip_back=skip_back *2\n else :\n skip_bytes=0\n decoder.setstate((b'',dec_flags))\n \n \n start_pos=position+skip_bytes\n start_flags=dec_flags\n if chars_to_skip ==0:\n \n return self._pack_cookie(start_pos,start_flags)\n \n \n \n \n \n bytes_fed=0\n need_eof=0\n \n chars_decoded=0\n for i in range(skip_bytes,len(next_input)):\n bytes_fed +=1\n chars_decoded +=len(decoder.decode(next_input[i:i+1]))\n dec_buffer,dec_flags=decoder.getstate()\n if not dec_buffer and chars_decoded <=chars_to_skip:\n \n start_pos +=bytes_fed\n chars_to_skip -=chars_decoded\n start_flags,bytes_fed,chars_decoded=dec_flags,0,0\n if chars_decoded >=chars_to_skip:\n break\n else :\n \n chars_decoded +=len(decoder.decode(b'',final=True ))\n need_eof=1\n if chars_decoded =0:\n endpos=pos+1\n break\n else :\n start=len(line)\n \n elif self._readuniversal:\n \n \n \n 
\n nlpos=line.find(\"\\n\",start)\n crpos=line.find(\"\\r\",start)\n if crpos ==-1:\n if nlpos ==-1:\n \n start=len(line)\n else :\n \n endpos=nlpos+1\n break\n elif nlpos ==-1:\n \n endpos=crpos+1\n break\n elif nlpos =0:\n endpos=pos+len(self._readnl)\n break\n \n if limit >=0 and len(line)>=limit:\n endpos=limit\n break\n \n \n while self._read_chunk():\n if self._decoded_chars:\n break\n if self._decoded_chars:\n line +=self._get_decoded_chars()\n else :\n \n self._set_decoded_chars('')\n self._snapshot=None\n return line\n \n if limit >=0 and endpos >limit:\n endpos=limit\n \n \n self._rewind_decoded_chars(len(line)-endpos)\n return line[:endpos]\n \n @property\n def newlines(self):\n return self._decoder.newlines if self._decoder else None\n \n \nclass StringIO(TextIOWrapper):\n ''\n\n\n\n \n \n def __init__(self,initial_value=\"\",newline=\"\\n\"):\n super(StringIO,self).__init__(BytesIO(),\n encoding=\"utf-8\",\n errors=\"strict\",\n newline=newline)\n \n \n if newline is None :\n self._writetranslate=False\n if initial_value is not None :\n if not isinstance(initial_value,str):\n raise TypeError(\"initial_value must be str or None, not {0}\"\n .format(type(initial_value).__name__))\n initial_value=str(initial_value)\n self.write(initial_value)\n self.seek(0)\n \n def getvalue(self):\n self.flush()\n return self.buffer.getvalue().decode(self._encoding,self._errors)\n \n def __repr__(self):\n \n \n return object.__repr__(self)\n \n @property\n def errors(self):\n return None\n \n @property\n def encoding(self):\n return None\n \n def detach(self):\n \n self._unsupported(\"detach\")\n"], "encodings.mac_latin2": [".py", "''\n\n\n\n\n\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_table)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False ):\n return codecs.charmap_encode(input,self.errors,encoding_table)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False ):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='mac-latin2',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n \n \ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' 
'\n'!'\n'\"'\n'#'\n'$'\n'%'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\xc4'\n'\\u0100'\n'\\u0101'\n'\\xc9'\n'\\u0104'\n'\\xd6'\n'\\xdc'\n'\\xe1'\n'\\u0105'\n'\\u010c'\n'\\xe4'\n'\\u010d'\n'\\u0106'\n'\\u0107'\n'\\xe9'\n'\\u0179'\n'\\u017a'\n'\\u010e'\n'\\xed'\n'\\u010f'\n'\\u0112'\n'\\u0113'\n'\\u0116'\n'\\xf3'\n'\\u0117'\n'\\xf4'\n'\\xf6'\n'\\xf5'\n'\\xfa'\n'\\u011a'\n'\\u011b'\n'\\xfc'\n'\\u2020'\n'\\xb0'\n'\\u0118'\n'\\xa3'\n'\\xa7'\n'\\u2022'\n'\\xb6'\n'\\xdf'\n'\\xae'\n'\\xa9'\n'\\u2122'\n'\\u0119'\n'\\xa8'\n'\\u2260'\n'\\u0123'\n'\\u012e'\n'\\u012f'\n'\\u012a'\n'\\u2264'\n'\\u2265'\n'\\u012b'\n'\\u0136'\n'\\u2202'\n'\\u2211'\n'\\u0142'\n'\\u013b'\n'\\u013c'\n'\\u013d'\n'\\u013e'\n'\\u0139'\n'\\u013a'\n'\\u0145'\n'\\u0146'\n'\\u0143'\n'\\xac'\n'\\u221a'\n'\\u0144'\n'\\u0147'\n'\\u2206'\n'\\xab'\n'\\xbb'\n'\\u2026'\n'\\xa0'\n'\\u0148'\n'\\u0150'\n'\\xd5'\n'\\u0151'\n'\\u014c'\n'\\u2013'\n'\\u2014'\n'\\u201c'\n'\\u201d'\n'\\u2018'\n'\\u2019'\n'\\xf7'\n'\\u25ca'\n'\\u014d'\n'\\u0154'\n'\\u0155'\n'\\u0158'\n'\\u2039'\n'\\u203a'\n'\\u0159'\n'\\u0156'\n'\\u0157'\n'\\u0160'\n'\\u201a'\n'\\u201e'\n'\\u0161'\n'\\u015a'\n'\\u015b'\n'\\xc1'\n'\\u0164'\n'\\u0165'\n'\\xcd'\n'\\u017d'\n'\\u017e'\n'\\u016a'\n'\\xd3'\n'\\xd4'\n'\\u016b'\n'\\u016e'\n'\\xda'\n'\\u016f'\n'\\u0170'\n'\\u0171'\n'\\u0172'\n'\\u0173'\n'\\xdd'\n'\\xfd'\n'\\u0137'\n'\\u017b'\n'\\u0141'\n'\\u017c'\n'\\u0122'\n'\\u02c7'\n)\n\n\nencoding_table=codecs.charmap_build(decoding_table)\n"], "encodings.iso8859_8": [".py", "''\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_table)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False ):\n return codecs.charmap_encode(input,self.errors,encoding_table)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False ):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='iso8859-8',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n \n \ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' 
'\n'!'\n'\"'\n'#'\n'$'\n'%'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\x80'\n'\\x81'\n'\\x82'\n'\\x83'\n'\\x84'\n'\\x85'\n'\\x86'\n'\\x87'\n'\\x88'\n'\\x89'\n'\\x8a'\n'\\x8b'\n'\\x8c'\n'\\x8d'\n'\\x8e'\n'\\x8f'\n'\\x90'\n'\\x91'\n'\\x92'\n'\\x93'\n'\\x94'\n'\\x95'\n'\\x96'\n'\\x97'\n'\\x98'\n'\\x99'\n'\\x9a'\n'\\x9b'\n'\\x9c'\n'\\x9d'\n'\\x9e'\n'\\x9f'\n'\\xa0'\n'\\ufffe'\n'\\xa2'\n'\\xa3'\n'\\xa4'\n'\\xa5'\n'\\xa6'\n'\\xa7'\n'\\xa8'\n'\\xa9'\n'\\xd7'\n'\\xab'\n'\\xac'\n'\\xad'\n'\\xae'\n'\\xaf'\n'\\xb0'\n'\\xb1'\n'\\xb2'\n'\\xb3'\n'\\xb4'\n'\\xb5'\n'\\xb6'\n'\\xb7'\n'\\xb8'\n'\\xb9'\n'\\xf7'\n'\\xbb'\n'\\xbc'\n'\\xbd'\n'\\xbe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\u2017'\n'\\u05d0'\n'\\u05d1'\n'\\u05d2'\n'\\u05d3'\n'\\u05d4'\n'\\u05d5'\n'\\u05d6'\n'\\u05d7'\n'\\u05d8'\n'\\u05d9'\n'\\u05da'\n'\\u05db'\n'\\u05dc'\n'\\u05dd'\n'\\u05de'\n'\\u05df'\n'\\u05e0'\n'\\u05e1'\n'\\u05e2'\n'\\u05e3'\n'\\u05e4'\n'\\u05e5'\n'\\u05e6'\n'\\u05e7'\n'\\u05e8'\n'\\u05e9'\n'\\u05ea'\n'\\ufffe'\n'\\ufffe'\n'\\u200e'\n'\\u200f'\n'\\ufffe'\n)\n\n\nencoding_table=codecs.charmap_build(decoding_table)\n"], "html.entities": [".py", 
"''\n\n\nname2codepoint={\n'AElig':0x00c6,\n'Aacute':0x00c1,\n'Acirc':0x00c2,\n'Agrave':0x00c0,\n'Alpha':0x0391,\n'Aring':0x00c5,\n'Atilde':0x00c3,\n'Auml':0x00c4,\n'Beta':0x0392,\n'Ccedil':0x00c7,\n'Chi':0x03a7,\n'Dagger':0x2021,\n'Delta':0x0394,\n'ETH':0x00d0,\n'Eacute':0x00c9,\n'Ecirc':0x00ca,\n'Egrave':0x00c8,\n'Epsilon':0x0395,\n'Eta':0x0397,\n'Euml':0x00cb,\n'Gamma':0x0393,\n'Iacute':0x00cd,\n'Icirc':0x00ce,\n'Igrave':0x00cc,\n'Iota':0x0399,\n'Iuml':0x00cf,\n'Kappa':0x039a,\n'Lambda':0x039b,\n'Mu':0x039c,\n'Ntilde':0x00d1,\n'Nu':0x039d,\n'OElig':0x0152,\n'Oacute':0x00d3,\n'Ocirc':0x00d4,\n'Ograve':0x00d2,\n'Omega':0x03a9,\n'Omicron':0x039f,\n'Oslash':0x00d8,\n'Otilde':0x00d5,\n'Ouml':0x00d6,\n'Phi':0x03a6,\n'Pi':0x03a0,\n'Prime':0x2033,\n'Psi':0x03a8,\n'Rho':0x03a1,\n'Scaron':0x0160,\n'Sigma':0x03a3,\n'THORN':0x00de,\n'Tau':0x03a4,\n'Theta':0x0398,\n'Uacute':0x00da,\n'Ucirc':0x00db,\n'Ugrave':0x00d9,\n'Upsilon':0x03a5,\n'Uuml':0x00dc,\n'Xi':0x039e,\n'Yacute':0x00dd,\n'Yuml':0x0178,\n'Zeta':0x0396,\n'aacute':0x00e1,\n'acirc':0x00e2,\n'acute':0x00b4,\n'aelig':0x00e6,\n'agrave':0x00e0,\n'alefsym':0x2135,\n'alpha':0x03b1,\n'amp':0x0026,\n'and':0x2227,\n'ang':0x2220,\n'aring':0x00e5,\n'asymp':0x2248,\n'atilde':0x00e3,\n'auml':0x00e4,\n'bdquo':0x201e,\n'beta':0x03b2,\n'brvbar':0x00a6,\n'bull':0x2022,\n'cap':0x2229,\n'ccedil':0x00e7,\n'cedil':0x00b8,\n'cent':0x00a2,\n'chi':0x03c7,\n'circ':0x02c6,\n'clubs':0x2663,\n'cong':0x2245,\n'copy':0x00a9,\n'crarr':0x21b5,\n'cup':0x222a,\n'curren':0x00a4,\n'dArr':0x21d3,\n'dagger':0x2020,\n'darr':0x2193,\n'deg':0x00b0,\n'delta':0x03b4,\n'diams':0x2666,\n'divide':0x00f7,\n'eacute':0x00e9,\n'ecirc':0x00ea,\n'egrave':0x00e8,\n'empty':0x2205,\n'emsp':0x2003,\n'ensp':0x2002,\n'epsilon':0x03b5,\n'equiv':0x2261,\n'eta':0x03b7,\n'eth':0x00f0,\n'euml':0x00eb,\n'euro':0x20ac,\n'exist':0x2203,\n'fnof':0x0192,\n'forall':0x2200,\n'frac12':0x00bd,\n'frac14':0x00bc,\n'frac34':0x00be,\n'frasl':0x2044,\n'gamma':0x03b3,\n'ge':0x2265,\n'gt':0x003e,\n'hArr':0x21d4,\n'harr':0x2194,\n'hearts':0x2665,\n'hellip':0x2026,\n'iacute':0x00ed,\n'icirc':0x00ee,\n'iexcl':0x00a1,\n'igrave':0x00ec,\n'image':0x2111,\n'infin':0x221e,\n'int':0x222b,\n'iota':0x03b9,\n'iquest':0x00bf,\n'isin':0x2208,\n'iuml':0x00ef,\n'kappa':0x03ba,\n'lArr':0x21d0,\n'lambda':0x03bb,\n'lang':0x2329,\n'laquo':0x00ab,\n'larr':0x2190,\n'lceil':0x2308,\n'ldquo':0x201c,\n'le':0x2264,\n'lfloor':0x230a,\n'lowast':0x2217,\n'loz':0x25ca,\n'lrm':0x200e,\n'lsaquo':0x2039,\n'lsquo':0x2018,\n'lt':0x003c,\n'macr':0x00af,\n'mdash':0x2014,\n'micro':0x00b5,\n'middot':0x00b7,\n'minus':0x2212,\n'mu':0x03bc,\n'nabla':0x2207,\n'nbsp':0x00a0,\n'ndash':0x2013,\n'ne':0x2260,\n'ni':0x220b,\n'not':0x00ac,\n'notin':0x2209,\n'nsub':0x2284,\n'ntilde':0x00f1,\n'nu':0x03bd,\n'oacute':0x00f3,\n'ocirc':0x00f4,\n'oelig':0x0153,\n'ograve':0x00f2,\n'oline':0x203e,\n'omega':0x03c9,\n'omicron':0x03bf,\n'oplus':0x2295,\n'or':0x2228,\n'ordf':0x00aa,\n'ordm':0x00ba,\n'oslash':0x00f8,\n'otilde':0x00f5,\n'otimes':0x2297,\n'ouml':0x00f6,\n'para':0x00b6,\n'part':0x2202,\n'permil':0x2030,\n'perp':0x22a5,\n'phi':0x03c6,\n'pi':0x03c0,\n'piv':0x03d6,\n'plusmn':0x00b1,\n'pound':0x00a3,\n'prime':0x2032,\n'prod':0x220f,\n'prop':0x221d,\n'psi':0x03c8,\n'quot':0x0022,\n'rArr':0x21d2,\n'radic':0x221a,\n'rang':0x232a,\n'raquo':0x00bb,\n'rarr':0x2192,\n'rceil':0x2309,\n'rdquo':0x201d,\n'real':0x211c,\n'reg':0x00ae,\n'rfloor':0x230b,\n'rho':0x03c1,\n'rlm':0x200f,\n'rsaquo':0x203a,\n'rsquo':0x2019,\n'sbquo':0x201a,\n'scaron':0x0161,\n'sdot':0x22c5,\n'sect':0x00a7,\n'
shy':0x00ad,\n'sigma':0x03c3,\n'sigmaf':0x03c2,\n'sim':0x223c,\n'spades':0x2660,\n'sub':0x2282,\n'sube':0x2286,\n'sum':0x2211,\n'sup':0x2283,\n'sup1':0x00b9,\n'sup2':0x00b2,\n'sup3':0x00b3,\n'supe':0x2287,\n'szlig':0x00df,\n'tau':0x03c4,\n'there4':0x2234,\n'theta':0x03b8,\n'thetasym':0x03d1,\n'thinsp':0x2009,\n'thorn':0x00fe,\n'tilde':0x02dc,\n'times':0x00d7,\n'trade':0x2122,\n'uArr':0x21d1,\n'uacute':0x00fa,\n'uarr':0x2191,\n'ucirc':0x00fb,\n'ugrave':0x00f9,\n'uml':0x00a8,\n'upsih':0x03d2,\n'upsilon':0x03c5,\n'uuml':0x00fc,\n'weierp':0x2118,\n'xi':0x03be,\n'yacute':0x00fd,\n'yen':0x00a5,\n'yuml':0x00ff,\n'zeta':0x03b6,\n'zwj':0x200d,\n'zwnj':0x200c,\n}\n\n\n\nhtml5={\n'Aacute':'\\xc1',\n'aacute':'\\xe1',\n'Aacute;':'\\xc1',\n'aacute;':'\\xe1',\n'Abreve;':'\\u0102',\n'abreve;':'\\u0103',\n'ac;':'\\u223e',\n'acd;':'\\u223f',\n'acE;':'\\u223e\\u0333',\n'Acirc':'\\xc2',\n'acirc':'\\xe2',\n'Acirc;':'\\xc2',\n'acirc;':'\\xe2',\n'acute':'\\xb4',\n'acute;':'\\xb4',\n'Acy;':'\\u0410',\n'acy;':'\\u0430',\n'AElig':'\\xc6',\n'aelig':'\\xe6',\n'AElig;':'\\xc6',\n'aelig;':'\\xe6',\n'af;':'\\u2061',\n'Afr;':'\\U0001d504',\n'afr;':'\\U0001d51e',\n'Agrave':'\\xc0',\n'agrave':'\\xe0',\n'Agrave;':'\\xc0',\n'agrave;':'\\xe0',\n'alefsym;':'\\u2135',\n'aleph;':'\\u2135',\n'Alpha;':'\\u0391',\n'alpha;':'\\u03b1',\n'Amacr;':'\\u0100',\n'amacr;':'\\u0101',\n'amalg;':'\\u2a3f',\n'AMP':'&',\n'amp':'&',\n'AMP;':'&',\n'amp;':'&',\n'And;':'\\u2a53',\n'and;':'\\u2227',\n'andand;':'\\u2a55',\n'andd;':'\\u2a5c',\n'andslope;':'\\u2a58',\n'andv;':'\\u2a5a',\n'ang;':'\\u2220',\n'ange;':'\\u29a4',\n'angle;':'\\u2220',\n'angmsd;':'\\u2221',\n'angmsdaa;':'\\u29a8',\n'angmsdab;':'\\u29a9',\n'angmsdac;':'\\u29aa',\n'angmsdad;':'\\u29ab',\n'angmsdae;':'\\u29ac',\n'angmsdaf;':'\\u29ad',\n'angmsdag;':'\\u29ae',\n'angmsdah;':'\\u29af',\n'angrt;':'\\u221f',\n'angrtvb;':'\\u22be',\n'angrtvbd;':'\\u299d',\n'angsph;':'\\u2222',\n'angst;':'\\xc5',\n'angzarr;':'\\u237c',\n'Aogon;':'\\u0104',\n'aogon;':'\\u0105',\n'Aopf;':'\\U0001d538',\n'aopf;':'\\U0001d552',\n'ap;':'\\u2248',\n'apacir;':'\\u2a6f',\n'apE;':'\\u2a70',\n'ape;':'\\u224a',\n'apid;':'\\u224b',\n'apos;':\"'\",\n'ApplyFunction;':'\\u2061',\n'approx;':'\\u2248',\n'approxeq;':'\\u224a',\n'Aring':'\\xc5',\n'aring':'\\xe5',\n'Aring;':'\\xc5',\n'aring;':'\\xe5',\n'Ascr;':'\\U0001d49c',\n'ascr;':'\\U0001d4b6',\n'Assign;':'\\u2254',\n'ast;':'*',\n'asymp;':'\\u2248',\n'asympeq;':'\\u224d',\n'Atilde':'\\xc3',\n'atilde':'\\xe3',\n'Atilde;':'\\xc3',\n'atilde;':'\\xe3',\n'Auml':'\\xc4',\n'auml':'\\xe4',\n'Auml;':'\\xc4',\n'auml;':'\\xe4',\n'awconint;':'\\u2233',\n'awint;':'\\u2a11',\n'backcong;':'\\u224c',\n'backepsilon;':'\\u03f6',\n'backprime;':'\\u2035',\n'backsim;':'\\u223d',\n'backsimeq;':'\\u22cd',\n'Backslash;':'\\u2216',\n'Barv;':'\\u2ae7',\n'barvee;':'\\u22bd',\n'Barwed;':'\\u2306',\n'barwed;':'\\u2305',\n'barwedge;':'\\u2305',\n'bbrk;':'\\u23b5',\n'bbrktbrk;':'\\u23b6',\n'bcong;':'\\u224c',\n'Bcy;':'\\u0411',\n'bcy;':'\\u0431',\n'bdquo;':'\\u201e',\n'becaus;':'\\u2235',\n'Because;':'\\u2235',\n'because;':'\\u2235',\n'bemptyv;':'\\u29b0',\n'bepsi;':'\\u03f6',\n'bernou;':'\\u212c',\n'Bernoullis;':'\\u212c',\n'Beta;':'\\u0392',\n'beta;':'\\u03b2',\n'beth;':'\\u2136',\n'between;':'\\u226c',\n'Bfr;':'\\U0001d505',\n'bfr;':'\\U0001d51f',\n'bigcap;':'\\u22c2',\n'bigcirc;':'\\u25ef',\n'bigcup;':'\\u22c3',\n'bigodot;':'\\u2a00',\n'bigoplus;':'\\u2a01',\n'bigotimes;':'\\u2a02',\n'bigsqcup;':'\\u2a06',\n'bigstar;':'\\u2605',\n'bigtriangledown;':'\\u25bd',\n'bigtriangleup;':'\\u25b3',\n'bigupl
us;':'\\u2a04',\n'bigvee;':'\\u22c1',\n'bigwedge;':'\\u22c0',\n'bkarow;':'\\u290d',\n'blacklozenge;':'\\u29eb',\n'blacksquare;':'\\u25aa',\n'blacktriangle;':'\\u25b4',\n'blacktriangledown;':'\\u25be',\n'blacktriangleleft;':'\\u25c2',\n'blacktriangleright;':'\\u25b8',\n'blank;':'\\u2423',\n'blk12;':'\\u2592',\n'blk14;':'\\u2591',\n'blk34;':'\\u2593',\n'block;':'\\u2588',\n'bne;':'=\\u20e5',\n'bnequiv;':'\\u2261\\u20e5',\n'bNot;':'\\u2aed',\n'bnot;':'\\u2310',\n'Bopf;':'\\U0001d539',\n'bopf;':'\\U0001d553',\n'bot;':'\\u22a5',\n'bottom;':'\\u22a5',\n'bowtie;':'\\u22c8',\n'boxbox;':'\\u29c9',\n'boxDL;':'\\u2557',\n'boxDl;':'\\u2556',\n'boxdL;':'\\u2555',\n'boxdl;':'\\u2510',\n'boxDR;':'\\u2554',\n'boxDr;':'\\u2553',\n'boxdR;':'\\u2552',\n'boxdr;':'\\u250c',\n'boxH;':'\\u2550',\n'boxh;':'\\u2500',\n'boxHD;':'\\u2566',\n'boxHd;':'\\u2564',\n'boxhD;':'\\u2565',\n'boxhd;':'\\u252c',\n'boxHU;':'\\u2569',\n'boxHu;':'\\u2567',\n'boxhU;':'\\u2568',\n'boxhu;':'\\u2534',\n'boxminus;':'\\u229f',\n'boxplus;':'\\u229e',\n'boxtimes;':'\\u22a0',\n'boxUL;':'\\u255d',\n'boxUl;':'\\u255c',\n'boxuL;':'\\u255b',\n'boxul;':'\\u2518',\n'boxUR;':'\\u255a',\n'boxUr;':'\\u2559',\n'boxuR;':'\\u2558',\n'boxur;':'\\u2514',\n'boxV;':'\\u2551',\n'boxv;':'\\u2502',\n'boxVH;':'\\u256c',\n'boxVh;':'\\u256b',\n'boxvH;':'\\u256a',\n'boxvh;':'\\u253c',\n'boxVL;':'\\u2563',\n'boxVl;':'\\u2562',\n'boxvL;':'\\u2561',\n'boxvl;':'\\u2524',\n'boxVR;':'\\u2560',\n'boxVr;':'\\u255f',\n'boxvR;':'\\u255e',\n'boxvr;':'\\u251c',\n'bprime;':'\\u2035',\n'Breve;':'\\u02d8',\n'breve;':'\\u02d8',\n'brvbar':'\\xa6',\n'brvbar;':'\\xa6',\n'Bscr;':'\\u212c',\n'bscr;':'\\U0001d4b7',\n'bsemi;':'\\u204f',\n'bsim;':'\\u223d',\n'bsime;':'\\u22cd',\n'bsol;':'\\\\',\n'bsolb;':'\\u29c5',\n'bsolhsub;':'\\u27c8',\n'bull;':'\\u2022',\n'bullet;':'\\u2022',\n'bump;':'\\u224e',\n'bumpE;':'\\u2aae',\n'bumpe;':'\\u224f',\n'Bumpeq;':'\\u224e',\n'bumpeq;':'\\u224f',\n'Cacute;':'\\u0106',\n'cacute;':'\\u0107',\n'Cap;':'\\u22d2',\n'cap;':'\\u2229',\n'capand;':'\\u2a44',\n'capbrcup;':'\\u2a49',\n'capcap;':'\\u2a4b',\n'capcup;':'\\u2a47',\n'capdot;':'\\u2a40',\n'CapitalDifferentialD;':'\\u2145',\n'caps;':'\\u2229\\ufe00',\n'caret;':'\\u2041',\n'caron;':'\\u02c7',\n'Cayleys;':'\\u212d',\n'ccaps;':'\\u2a4d',\n'Ccaron;':'\\u010c',\n'ccaron;':'\\u010d',\n'Ccedil':'\\xc7',\n'ccedil':'\\xe7',\n'Ccedil;':'\\xc7',\n'ccedil;':'\\xe7',\n'Ccirc;':'\\u0108',\n'ccirc;':'\\u0109',\n'Cconint;':'\\u2230',\n'ccups;':'\\u2a4c',\n'ccupssm;':'\\u2a50',\n'Cdot;':'\\u010a',\n'cdot;':'\\u010b',\n'cedil':'\\xb8',\n'cedil;':'\\xb8',\n'Cedilla;':'\\xb8',\n'cemptyv;':'\\u29b2',\n'cent':'\\xa2',\n'cent;':'\\xa2',\n'CenterDot;':'\\xb7',\n'centerdot;':'\\xb7',\n'Cfr;':'\\u212d',\n'cfr;':'\\U0001d520',\n'CHcy;':'\\u0427',\n'chcy;':'\\u0447',\n'check;':'\\u2713',\n'checkmark;':'\\u2713',\n'Chi;':'\\u03a7',\n'chi;':'\\u03c7',\n'cir;':'\\u25cb',\n'circ;':'\\u02c6',\n'circeq;':'\\u2257',\n'circlearrowleft;':'\\u21ba',\n'circlearrowright;':'\\u21bb',\n'circledast;':'\\u229b',\n'circledcirc;':'\\u229a',\n'circleddash;':'\\u229d',\n'CircleDot;':'\\u2299',\n'circledR;':'\\xae',\n'circledS;':'\\u24c8',\n'CircleMinus;':'\\u2296',\n'CirclePlus;':'\\u2295',\n'CircleTimes;':'\\u2297',\n'cirE;':'\\u29c3',\n'cire;':'\\u2257',\n'cirfnint;':'\\u2a10',\n'cirmid;':'\\u2aef',\n'cirscir;':'\\u29c2',\n'ClockwiseContourIntegral;':'\\u2232',\n'CloseCurlyDoubleQuote;':'\\u201d',\n'CloseCurlyQuote;':'\\u2019',\n'clubs;':'\\u2663',\n'clubsuit;':'\\u2663',\n'Colon;':'\\u2237',\n'colon;':':',\n'Colone;':'\\u2a74',\n'colone;':'\\u2
254',\n'coloneq;':'\\u2254',\n'comma;':',',\n'commat;':'@',\n'comp;':'\\u2201',\n'compfn;':'\\u2218',\n'complement;':'\\u2201',\n'complexes;':'\\u2102',\n'cong;':'\\u2245',\n'congdot;':'\\u2a6d',\n'Congruent;':'\\u2261',\n'Conint;':'\\u222f',\n'conint;':'\\u222e',\n'ContourIntegral;':'\\u222e',\n'Copf;':'\\u2102',\n'copf;':'\\U0001d554',\n'coprod;':'\\u2210',\n'Coproduct;':'\\u2210',\n'COPY':'\\xa9',\n'copy':'\\xa9',\n'COPY;':'\\xa9',\n'copy;':'\\xa9',\n'copysr;':'\\u2117',\n'CounterClockwiseContourIntegral;':'\\u2233',\n'crarr;':'\\u21b5',\n'Cross;':'\\u2a2f',\n'cross;':'\\u2717',\n'Cscr;':'\\U0001d49e',\n'cscr;':'\\U0001d4b8',\n'csub;':'\\u2acf',\n'csube;':'\\u2ad1',\n'csup;':'\\u2ad0',\n'csupe;':'\\u2ad2',\n'ctdot;':'\\u22ef',\n'cudarrl;':'\\u2938',\n'cudarrr;':'\\u2935',\n'cuepr;':'\\u22de',\n'cuesc;':'\\u22df',\n'cularr;':'\\u21b6',\n'cularrp;':'\\u293d',\n'Cup;':'\\u22d3',\n'cup;':'\\u222a',\n'cupbrcap;':'\\u2a48',\n'CupCap;':'\\u224d',\n'cupcap;':'\\u2a46',\n'cupcup;':'\\u2a4a',\n'cupdot;':'\\u228d',\n'cupor;':'\\u2a45',\n'cups;':'\\u222a\\ufe00',\n'curarr;':'\\u21b7',\n'curarrm;':'\\u293c',\n'curlyeqprec;':'\\u22de',\n'curlyeqsucc;':'\\u22df',\n'curlyvee;':'\\u22ce',\n'curlywedge;':'\\u22cf',\n'curren':'\\xa4',\n'curren;':'\\xa4',\n'curvearrowleft;':'\\u21b6',\n'curvearrowright;':'\\u21b7',\n'cuvee;':'\\u22ce',\n'cuwed;':'\\u22cf',\n'cwconint;':'\\u2232',\n'cwint;':'\\u2231',\n'cylcty;':'\\u232d',\n'Dagger;':'\\u2021',\n'dagger;':'\\u2020',\n'daleth;':'\\u2138',\n'Darr;':'\\u21a1',\n'dArr;':'\\u21d3',\n'darr;':'\\u2193',\n'dash;':'\\u2010',\n'Dashv;':'\\u2ae4',\n'dashv;':'\\u22a3',\n'dbkarow;':'\\u290f',\n'dblac;':'\\u02dd',\n'Dcaron;':'\\u010e',\n'dcaron;':'\\u010f',\n'Dcy;':'\\u0414',\n'dcy;':'\\u0434',\n'DD;':'\\u2145',\n'dd;':'\\u2146',\n'ddagger;':'\\u2021',\n'ddarr;':'\\u21ca',\n'DDotrahd;':'\\u2911',\n'ddotseq;':'\\u2a77',\n'deg':'\\xb0',\n'deg;':'\\xb0',\n'Del;':'\\u2207',\n'Delta;':'\\u0394',\n'delta;':'\\u03b4',\n'demptyv;':'\\u29b1',\n'dfisht;':'\\u297f',\n'Dfr;':'\\U0001d507',\n'dfr;':'\\U0001d521',\n'dHar;':'\\u2965',\n'dharl;':'\\u21c3',\n'dharr;':'\\u21c2',\n'DiacriticalAcute;':'\\xb4',\n'DiacriticalDot;':'\\u02d9',\n'DiacriticalDoubleAcute;':'\\u02dd',\n'DiacriticalGrave;':'`',\n'DiacriticalTilde;':'\\u02dc',\n'diam;':'\\u22c4',\n'Diamond;':'\\u22c4',\n'diamond;':'\\u22c4',\n'diamondsuit;':'\\u2666',\n'diams;':'\\u2666',\n'die;':'\\xa8',\n'DifferentialD;':'\\u2146',\n'digamma;':'\\u03dd',\n'disin;':'\\u22f2',\n'div;':'\\xf7',\n'divide':'\\xf7',\n'divide;':'\\xf7',\n'divideontimes;':'\\u22c7',\n'divonx;':'\\u22c7',\n'DJcy;':'\\u0402',\n'djcy;':'\\u0452',\n'dlcorn;':'\\u231e',\n'dlcrop;':'\\u230d',\n'dollar;':'$',\n'Dopf;':'\\U0001d53b',\n'dopf;':'\\U0001d555',\n'Dot;':'\\xa8',\n'dot;':'\\u02d9',\n'DotDot;':'\\u20dc',\n'doteq;':'\\u2250',\n'doteqdot;':'\\u2251',\n'DotEqual;':'\\u2250',\n'dotminus;':'\\u2238',\n'dotplus;':'\\u2214',\n'dotsquare;':'\\u22a1',\n'doublebarwedge;':'\\u2306',\n'DoubleContourIntegral;':'\\u222f',\n'DoubleDot;':'\\xa8',\n'DoubleDownArrow;':'\\u21d3',\n'DoubleLeftArrow;':'\\u21d0',\n'DoubleLeftRightArrow;':'\\u21d4',\n'DoubleLeftTee;':'\\u2ae4',\n'DoubleLongLeftArrow;':'\\u27f8',\n'DoubleLongLeftRightArrow;':'\\u27fa',\n'DoubleLongRightArrow;':'\\u27f9',\n'DoubleRightArrow;':'\\u21d2',\n'DoubleRightTee;':'\\u22a8',\n'DoubleUpArrow;':'\\u21d1',\n'DoubleUpDownArrow;':'\\u21d5',\n'DoubleVerticalBar;':'\\u2225',\n'DownArrow;':'\\u2193',\n'Downarrow;':'\\u21d3',\n'downarrow;':'\\u2193',\n'DownArrowBar;':'\\u2913',\n'DownArrowUpArrow;':'\\u21f5',\n'Do
wnBreve;':'\\u0311',\n'downdownarrows;':'\\u21ca',\n'downharpoonleft;':'\\u21c3',\n'downharpoonright;':'\\u21c2',\n'DownLeftRightVector;':'\\u2950',\n'DownLeftTeeVector;':'\\u295e',\n'DownLeftVector;':'\\u21bd',\n'DownLeftVectorBar;':'\\u2956',\n'DownRightTeeVector;':'\\u295f',\n'DownRightVector;':'\\u21c1',\n'DownRightVectorBar;':'\\u2957',\n'DownTee;':'\\u22a4',\n'DownTeeArrow;':'\\u21a7',\n'drbkarow;':'\\u2910',\n'drcorn;':'\\u231f',\n'drcrop;':'\\u230c',\n'Dscr;':'\\U0001d49f',\n'dscr;':'\\U0001d4b9',\n'DScy;':'\\u0405',\n'dscy;':'\\u0455',\n'dsol;':'\\u29f6',\n'Dstrok;':'\\u0110',\n'dstrok;':'\\u0111',\n'dtdot;':'\\u22f1',\n'dtri;':'\\u25bf',\n'dtrif;':'\\u25be',\n'duarr;':'\\u21f5',\n'duhar;':'\\u296f',\n'dwangle;':'\\u29a6',\n'DZcy;':'\\u040f',\n'dzcy;':'\\u045f',\n'dzigrarr;':'\\u27ff',\n'Eacute':'\\xc9',\n'eacute':'\\xe9',\n'Eacute;':'\\xc9',\n'eacute;':'\\xe9',\n'easter;':'\\u2a6e',\n'Ecaron;':'\\u011a',\n'ecaron;':'\\u011b',\n'ecir;':'\\u2256',\n'Ecirc':'\\xca',\n'ecirc':'\\xea',\n'Ecirc;':'\\xca',\n'ecirc;':'\\xea',\n'ecolon;':'\\u2255',\n'Ecy;':'\\u042d',\n'ecy;':'\\u044d',\n'eDDot;':'\\u2a77',\n'Edot;':'\\u0116',\n'eDot;':'\\u2251',\n'edot;':'\\u0117',\n'ee;':'\\u2147',\n'efDot;':'\\u2252',\n'Efr;':'\\U0001d508',\n'efr;':'\\U0001d522',\n'eg;':'\\u2a9a',\n'Egrave':'\\xc8',\n'egrave':'\\xe8',\n'Egrave;':'\\xc8',\n'egrave;':'\\xe8',\n'egs;':'\\u2a96',\n'egsdot;':'\\u2a98',\n'el;':'\\u2a99',\n'Element;':'\\u2208',\n'elinters;':'\\u23e7',\n'ell;':'\\u2113',\n'els;':'\\u2a95',\n'elsdot;':'\\u2a97',\n'Emacr;':'\\u0112',\n'emacr;':'\\u0113',\n'empty;':'\\u2205',\n'emptyset;':'\\u2205',\n'EmptySmallSquare;':'\\u25fb',\n'emptyv;':'\\u2205',\n'EmptyVerySmallSquare;':'\\u25ab',\n'emsp13;':'\\u2004',\n'emsp14;':'\\u2005',\n'emsp;':'\\u2003',\n'ENG;':'\\u014a',\n'eng;':'\\u014b',\n'ensp;':'\\u2002',\n'Eogon;':'\\u0118',\n'eogon;':'\\u0119',\n'Eopf;':'\\U0001d53c',\n'eopf;':'\\U0001d556',\n'epar;':'\\u22d5',\n'eparsl;':'\\u29e3',\n'eplus;':'\\u2a71',\n'epsi;':'\\u03b5',\n'Epsilon;':'\\u0395',\n'epsilon;':'\\u03b5',\n'epsiv;':'\\u03f5',\n'eqcirc;':'\\u2256',\n'eqcolon;':'\\u2255',\n'eqsim;':'\\u2242',\n'eqslantgtr;':'\\u2a96',\n'eqslantless;':'\\u2a95',\n'Equal;':'\\u2a75',\n'equals;':'=',\n'EqualTilde;':'\\u2242',\n'equest;':'\\u225f',\n'Equilibrium;':'\\u21cc',\n'equiv;':'\\u2261',\n'equivDD;':'\\u2a78',\n'eqvparsl;':'\\u29e5',\n'erarr;':'\\u2971',\n'erDot;':'\\u2253',\n'Escr;':'\\u2130',\n'escr;':'\\u212f',\n'esdot;':'\\u2250',\n'Esim;':'\\u2a73',\n'esim;':'\\u2242',\n'Eta;':'\\u0397',\n'eta;':'\\u03b7',\n'ETH':'\\xd0',\n'eth':'\\xf0',\n'ETH;':'\\xd0',\n'eth;':'\\xf0',\n'Euml':'\\xcb',\n'euml':'\\xeb',\n'Euml;':'\\xcb',\n'euml;':'\\xeb',\n'euro;':'\\u20ac',\n'excl;':'!',\n'exist;':'\\u2203',\n'Exists;':'\\u2203',\n'expectation;':'\\u2130',\n'ExponentialE;':'\\u2147',\n'exponentiale;':'\\u2147',\n'fallingdotseq;':'\\u2252',\n'Fcy;':'\\u0424',\n'fcy;':'\\u0444',\n'female;':'\\u2640',\n'ffilig;':'\\ufb03',\n'fflig;':'\\ufb00',\n'ffllig;':'\\ufb04',\n'Ffr;':'\\U0001d509',\n'ffr;':'\\U0001d523',\n'filig;':'\\ufb01',\n'FilledSmallSquare;':'\\u25fc',\n'FilledVerySmallSquare;':'\\u25aa',\n'fjlig;':'fj',\n'flat;':'\\u266d',\n'fllig;':'\\ufb02',\n'fltns;':'\\u25b1',\n'fnof;':'\\u0192',\n'Fopf;':'\\U0001d53d',\n'fopf;':'\\U0001d557',\n'ForAll;':'\\u2200',\n'forall;':'\\u2200',\n'fork;':'\\u22d4',\n'forkv;':'\\u2ad9',\n'Fouriertrf;':'\\u2131',\n'fpartint;':'\\u2a0d',\n'frac12':'\\xbd',\n'frac12;':'\\xbd',\n'frac13;':'\\u2153',\n'frac14':'\\xbc',\n'frac14;':'\\xbc',\n'frac15;':'\\u2155',\n'frac16;':'\\
u2159',\n'frac18;':'\\u215b',\n'frac23;':'\\u2154',\n'frac25;':'\\u2156',\n'frac34':'\\xbe',\n'frac34;':'\\xbe',\n'frac35;':'\\u2157',\n'frac38;':'\\u215c',\n'frac45;':'\\u2158',\n'frac56;':'\\u215a',\n'frac58;':'\\u215d',\n'frac78;':'\\u215e',\n'frasl;':'\\u2044',\n'frown;':'\\u2322',\n'Fscr;':'\\u2131',\n'fscr;':'\\U0001d4bb',\n'gacute;':'\\u01f5',\n'Gamma;':'\\u0393',\n'gamma;':'\\u03b3',\n'Gammad;':'\\u03dc',\n'gammad;':'\\u03dd',\n'gap;':'\\u2a86',\n'Gbreve;':'\\u011e',\n'gbreve;':'\\u011f',\n'Gcedil;':'\\u0122',\n'Gcirc;':'\\u011c',\n'gcirc;':'\\u011d',\n'Gcy;':'\\u0413',\n'gcy;':'\\u0433',\n'Gdot;':'\\u0120',\n'gdot;':'\\u0121',\n'gE;':'\\u2267',\n'ge;':'\\u2265',\n'gEl;':'\\u2a8c',\n'gel;':'\\u22db',\n'geq;':'\\u2265',\n'geqq;':'\\u2267',\n'geqslant;':'\\u2a7e',\n'ges;':'\\u2a7e',\n'gescc;':'\\u2aa9',\n'gesdot;':'\\u2a80',\n'gesdoto;':'\\u2a82',\n'gesdotol;':'\\u2a84',\n'gesl;':'\\u22db\\ufe00',\n'gesles;':'\\u2a94',\n'Gfr;':'\\U0001d50a',\n'gfr;':'\\U0001d524',\n'Gg;':'\\u22d9',\n'gg;':'\\u226b',\n'ggg;':'\\u22d9',\n'gimel;':'\\u2137',\n'GJcy;':'\\u0403',\n'gjcy;':'\\u0453',\n'gl;':'\\u2277',\n'gla;':'\\u2aa5',\n'glE;':'\\u2a92',\n'glj;':'\\u2aa4',\n'gnap;':'\\u2a8a',\n'gnapprox;':'\\u2a8a',\n'gnE;':'\\u2269',\n'gne;':'\\u2a88',\n'gneq;':'\\u2a88',\n'gneqq;':'\\u2269',\n'gnsim;':'\\u22e7',\n'Gopf;':'\\U0001d53e',\n'gopf;':'\\U0001d558',\n'grave;':'`',\n'GreaterEqual;':'\\u2265',\n'GreaterEqualLess;':'\\u22db',\n'GreaterFullEqual;':'\\u2267',\n'GreaterGreater;':'\\u2aa2',\n'GreaterLess;':'\\u2277',\n'GreaterSlantEqual;':'\\u2a7e',\n'GreaterTilde;':'\\u2273',\n'Gscr;':'\\U0001d4a2',\n'gscr;':'\\u210a',\n'gsim;':'\\u2273',\n'gsime;':'\\u2a8e',\n'gsiml;':'\\u2a90',\n'GT':'>',\n'gt':'>',\n'GT;':'>',\n'Gt;':'\\u226b',\n'gt;':'>',\n'gtcc;':'\\u2aa7',\n'gtcir;':'\\u2a7a',\n'gtdot;':'\\u22d7',\n'gtlPar;':'\\u2995',\n'gtquest;':'\\u2a7c',\n'gtrapprox;':'\\u2a86',\n'gtrarr;':'\\u2978',\n'gtrdot;':'\\u22d7',\n'gtreqless;':'\\u22db',\n'gtreqqless;':'\\u2a8c',\n'gtrless;':'\\u2277',\n'gtrsim;':'\\u2273',\n'gvertneqq;':'\\u2269\\ufe00',\n'gvnE;':'\\u2269\\ufe00',\n'Hacek;':'\\u02c7',\n'hairsp;':'\\u200a',\n'half;':'\\xbd',\n'hamilt;':'\\u210b',\n'HARDcy;':'\\u042a',\n'hardcy;':'\\u044a',\n'hArr;':'\\u21d4',\n'harr;':'\\u2194',\n'harrcir;':'\\u2948',\n'harrw;':'\\u21ad',\n'Hat;':'^',\n'hbar;':'\\u210f',\n'Hcirc;':'\\u0124',\n'hcirc;':'\\u0125',\n'hearts;':'\\u2665',\n'heartsuit;':'\\u2665',\n'hellip;':'\\u2026',\n'hercon;':'\\u22b9',\n'Hfr;':'\\u210c',\n'hfr;':'\\U0001d525',\n'HilbertSpace;':'\\u210b',\n'hksearow;':'\\u2925',\n'hkswarow;':'\\u2926',\n'hoarr;':'\\u21ff',\n'homtht;':'\\u223b',\n'hookleftarrow;':'\\u21a9',\n'hookrightarrow;':'\\u21aa',\n'Hopf;':'\\u210d',\n'hopf;':'\\U0001d559',\n'horbar;':'\\u2015',\n'HorizontalLine;':'\\u2500',\n'Hscr;':'\\u210b',\n'hscr;':'\\U0001d4bd',\n'hslash;':'\\u210f',\n'Hstrok;':'\\u0126',\n'hstrok;':'\\u0127',\n'HumpDownHump;':'\\u224e',\n'HumpEqual;':'\\u224f',\n'hybull;':'\\u2043',\n'hyphen;':'\\u2010',\n'Iacute':'\\xcd',\n'iacute':'\\xed',\n'Iacute;':'\\xcd',\n'iacute;':'\\xed',\n'ic;':'\\u2063',\n'Icirc':'\\xce',\n'icirc':'\\xee',\n'Icirc;':'\\xce',\n'icirc;':'\\xee',\n'Icy;':'\\u0418',\n'icy;':'\\u0438',\n'Idot;':'\\u0130',\n'IEcy;':'\\u0415',\n'iecy;':'\\u0435',\n'iexcl':'\\xa1',\n'iexcl;':'\\xa1',\n'iff;':'\\u21d4',\n'Ifr;':'\\u2111',\n'ifr;':'\\U0001d526',\n'Igrave':'\\xcc',\n'igrave':'\\xec',\n'Igrave;':'\\xcc',\n'igrave;':'\\xec',\n'ii;':'\\u2148',\n'iiiint;':'\\u2a0c',\n'iiint;':'\\u222d',\n'iinfin;':'\\u29dc',\n'iiota;':'\\u2129',\n'IJlig;':'\\
u0132',\n'ijlig;':'\\u0133',\n'Im;':'\\u2111',\n'Imacr;':'\\u012a',\n'imacr;':'\\u012b',\n'image;':'\\u2111',\n'ImaginaryI;':'\\u2148',\n'imagline;':'\\u2110',\n'imagpart;':'\\u2111',\n'imath;':'\\u0131',\n'imof;':'\\u22b7',\n'imped;':'\\u01b5',\n'Implies;':'\\u21d2',\n'in;':'\\u2208',\n'incare;':'\\u2105',\n'infin;':'\\u221e',\n'infintie;':'\\u29dd',\n'inodot;':'\\u0131',\n'Int;':'\\u222c',\n'int;':'\\u222b',\n'intcal;':'\\u22ba',\n'integers;':'\\u2124',\n'Integral;':'\\u222b',\n'intercal;':'\\u22ba',\n'Intersection;':'\\u22c2',\n'intlarhk;':'\\u2a17',\n'intprod;':'\\u2a3c',\n'InvisibleComma;':'\\u2063',\n'InvisibleTimes;':'\\u2062',\n'IOcy;':'\\u0401',\n'iocy;':'\\u0451',\n'Iogon;':'\\u012e',\n'iogon;':'\\u012f',\n'Iopf;':'\\U0001d540',\n'iopf;':'\\U0001d55a',\n'Iota;':'\\u0399',\n'iota;':'\\u03b9',\n'iprod;':'\\u2a3c',\n'iquest':'\\xbf',\n'iquest;':'\\xbf',\n'Iscr;':'\\u2110',\n'iscr;':'\\U0001d4be',\n'isin;':'\\u2208',\n'isindot;':'\\u22f5',\n'isinE;':'\\u22f9',\n'isins;':'\\u22f4',\n'isinsv;':'\\u22f3',\n'isinv;':'\\u2208',\n'it;':'\\u2062',\n'Itilde;':'\\u0128',\n'itilde;':'\\u0129',\n'Iukcy;':'\\u0406',\n'iukcy;':'\\u0456',\n'Iuml':'\\xcf',\n'iuml':'\\xef',\n'Iuml;':'\\xcf',\n'iuml;':'\\xef',\n'Jcirc;':'\\u0134',\n'jcirc;':'\\u0135',\n'Jcy;':'\\u0419',\n'jcy;':'\\u0439',\n'Jfr;':'\\U0001d50d',\n'jfr;':'\\U0001d527',\n'jmath;':'\\u0237',\n'Jopf;':'\\U0001d541',\n'jopf;':'\\U0001d55b',\n'Jscr;':'\\U0001d4a5',\n'jscr;':'\\U0001d4bf',\n'Jsercy;':'\\u0408',\n'jsercy;':'\\u0458',\n'Jukcy;':'\\u0404',\n'jukcy;':'\\u0454',\n'Kappa;':'\\u039a',\n'kappa;':'\\u03ba',\n'kappav;':'\\u03f0',\n'Kcedil;':'\\u0136',\n'kcedil;':'\\u0137',\n'Kcy;':'\\u041a',\n'kcy;':'\\u043a',\n'Kfr;':'\\U0001d50e',\n'kfr;':'\\U0001d528',\n'kgreen;':'\\u0138',\n'KHcy;':'\\u0425',\n'khcy;':'\\u0445',\n'KJcy;':'\\u040c',\n'kjcy;':'\\u045c',\n'Kopf;':'\\U0001d542',\n'kopf;':'\\U0001d55c',\n'Kscr;':'\\U0001d4a6',\n'kscr;':'\\U0001d4c0',\n'lAarr;':'\\u21da',\n'Lacute;':'\\u0139',\n'lacute;':'\\u013a',\n'laemptyv;':'\\u29b4',\n'lagran;':'\\u2112',\n'Lambda;':'\\u039b',\n'lambda;':'\\u03bb',\n'Lang;':'\\u27ea',\n'lang;':'\\u27e8',\n'langd;':'\\u2991',\n'langle;':'\\u27e8',\n'lap;':'\\u2a85',\n'Laplacetrf;':'\\u2112',\n'laquo':'\\xab',\n'laquo;':'\\xab',\n'Larr;':'\\u219e',\n'lArr;':'\\u21d0',\n'larr;':'\\u2190',\n'larrb;':'\\u21e4',\n'larrbfs;':'\\u291f',\n'larrfs;':'\\u291d',\n'larrhk;':'\\u21a9',\n'larrlp;':'\\u21ab',\n'larrpl;':'\\u2939',\n'larrsim;':'\\u2973',\n'larrtl;':'\\u21a2',\n'lat;':'\\u2aab',\n'lAtail;':'\\u291b',\n'latail;':'\\u2919',\n'late;':'\\u2aad',\n'lates;':'\\u2aad\\ufe00',\n'lBarr;':'\\u290e',\n'lbarr;':'\\u290c',\n'lbbrk;':'\\u2772',\n'lbrace;':'{',\n'lbrack;':'[',\n'lbrke;':'\\u298b',\n'lbrksld;':'\\u298f',\n'lbrkslu;':'\\u298d',\n'Lcaron;':'\\u013d',\n'lcaron;':'\\u013e',\n'Lcedil;':'\\u013b',\n'lcedil;':'\\u013c',\n'lceil;':'\\u2308',\n'lcub;':'{',\n'Lcy;':'\\u041b',\n'lcy;':'\\u043b',\n'ldca;':'\\u2936',\n'ldquo;':'\\u201c',\n'ldquor;':'\\u201e',\n'ldrdhar;':'\\u2967',\n'ldrushar;':'\\u294b',\n'ldsh;':'\\u21b2',\n'lE;':'\\u2266',\n'le;':'\\u2264',\n'LeftAngleBracket;':'\\u27e8',\n'LeftArrow;':'\\u2190',\n'Leftarrow;':'\\u21d0',\n'leftarrow;':'\\u2190',\n'LeftArrowBar;':'\\u21e4',\n'LeftArrowRightArrow;':'\\u21c6',\n'leftarrowtail;':'\\u21a2',\n'LeftCeiling;':'\\u2308',\n'LeftDoubleBracket;':'\\u27e6',\n'LeftDownTeeVector;':'\\u2961',\n'LeftDownVector;':'\\u21c3',\n'LeftDownVectorBar;':'\\u2959',\n'LeftFloor;':'\\u230a',\n'leftharpoondown;':'\\u21bd',\n'leftharpoonup;':'\\u21bc',\n'leftleftarrows;':'
\\u21c7',\n'LeftRightArrow;':'\\u2194',\n'Leftrightarrow;':'\\u21d4',\n'leftrightarrow;':'\\u2194',\n'leftrightarrows;':'\\u21c6',\n'leftrightharpoons;':'\\u21cb',\n'leftrightsquigarrow;':'\\u21ad',\n'LeftRightVector;':'\\u294e',\n'LeftTee;':'\\u22a3',\n'LeftTeeArrow;':'\\u21a4',\n'LeftTeeVector;':'\\u295a',\n'leftthreetimes;':'\\u22cb',\n'LeftTriangle;':'\\u22b2',\n'LeftTriangleBar;':'\\u29cf',\n'LeftTriangleEqual;':'\\u22b4',\n'LeftUpDownVector;':'\\u2951',\n'LeftUpTeeVector;':'\\u2960',\n'LeftUpVector;':'\\u21bf',\n'LeftUpVectorBar;':'\\u2958',\n'LeftVector;':'\\u21bc',\n'LeftVectorBar;':'\\u2952',\n'lEg;':'\\u2a8b',\n'leg;':'\\u22da',\n'leq;':'\\u2264',\n'leqq;':'\\u2266',\n'leqslant;':'\\u2a7d',\n'les;':'\\u2a7d',\n'lescc;':'\\u2aa8',\n'lesdot;':'\\u2a7f',\n'lesdoto;':'\\u2a81',\n'lesdotor;':'\\u2a83',\n'lesg;':'\\u22da\\ufe00',\n'lesges;':'\\u2a93',\n'lessapprox;':'\\u2a85',\n'lessdot;':'\\u22d6',\n'lesseqgtr;':'\\u22da',\n'lesseqqgtr;':'\\u2a8b',\n'LessEqualGreater;':'\\u22da',\n'LessFullEqual;':'\\u2266',\n'LessGreater;':'\\u2276',\n'lessgtr;':'\\u2276',\n'LessLess;':'\\u2aa1',\n'lesssim;':'\\u2272',\n'LessSlantEqual;':'\\u2a7d',\n'LessTilde;':'\\u2272',\n'lfisht;':'\\u297c',\n'lfloor;':'\\u230a',\n'Lfr;':'\\U0001d50f',\n'lfr;':'\\U0001d529',\n'lg;':'\\u2276',\n'lgE;':'\\u2a91',\n'lHar;':'\\u2962',\n'lhard;':'\\u21bd',\n'lharu;':'\\u21bc',\n'lharul;':'\\u296a',\n'lhblk;':'\\u2584',\n'LJcy;':'\\u0409',\n'ljcy;':'\\u0459',\n'Ll;':'\\u22d8',\n'll;':'\\u226a',\n'llarr;':'\\u21c7',\n'llcorner;':'\\u231e',\n'Lleftarrow;':'\\u21da',\n'llhard;':'\\u296b',\n'lltri;':'\\u25fa',\n'Lmidot;':'\\u013f',\n'lmidot;':'\\u0140',\n'lmoust;':'\\u23b0',\n'lmoustache;':'\\u23b0',\n'lnap;':'\\u2a89',\n'lnapprox;':'\\u2a89',\n'lnE;':'\\u2268',\n'lne;':'\\u2a87',\n'lneq;':'\\u2a87',\n'lneqq;':'\\u2268',\n'lnsim;':'\\u22e6',\n'loang;':'\\u27ec',\n'loarr;':'\\u21fd',\n'lobrk;':'\\u27e6',\n'LongLeftArrow;':'\\u27f5',\n'Longleftarrow;':'\\u27f8',\n'longleftarrow;':'\\u27f5',\n'LongLeftRightArrow;':'\\u27f7',\n'Longleftrightarrow;':'\\u27fa',\n'longleftrightarrow;':'\\u27f7',\n'longmapsto;':'\\u27fc',\n'LongRightArrow;':'\\u27f6',\n'Longrightarrow;':'\\u27f9',\n'longrightarrow;':'\\u27f6',\n'looparrowleft;':'\\u21ab',\n'looparrowright;':'\\u21ac',\n'lopar;':'\\u2985',\n'Lopf;':'\\U0001d543',\n'lopf;':'\\U0001d55d',\n'loplus;':'\\u2a2d',\n'lotimes;':'\\u2a34',\n'lowast;':'\\u2217',\n'lowbar;':'_',\n'LowerLeftArrow;':'\\u2199',\n'LowerRightArrow;':'\\u2198',\n'loz;':'\\u25ca',\n'lozenge;':'\\u25ca',\n'lozf;':'\\u29eb',\n'lpar;':'(',\n'lparlt;':'\\u2993',\n'lrarr;':'\\u21c6',\n'lrcorner;':'\\u231f',\n'lrhar;':'\\u21cb',\n'lrhard;':'\\u296d',\n'lrm;':'\\u200e',\n'lrtri;':'\\u22bf',\n'lsaquo;':'\\u2039',\n'Lscr;':'\\u2112',\n'lscr;':'\\U0001d4c1',\n'Lsh;':'\\u21b0',\n'lsh;':'\\u21b0',\n'lsim;':'\\u2272',\n'lsime;':'\\u2a8d',\n'lsimg;':'\\u2a8f',\n'lsqb;':'[',\n'lsquo;':'\\u2018',\n'lsquor;':'\\u201a',\n'Lstrok;':'\\u0141',\n'lstrok;':'\\u0142',\n'LT':'<',\n'lt':'<',\n'LT;':'<',\n'Lt;':'\\u226a',\n'lt;':'<',\n'ltcc;':'\\u2aa6',\n'ltcir;':'\\u2a79',\n'ltdot;':'\\u22d6',\n'lthree;':'\\u22cb',\n'ltimes;':'\\u22c9',\n'ltlarr;':'\\u2976',\n'ltquest;':'\\u2a7b',\n'ltri;':'\\u25c3',\n'ltrie;':'\\u22b4',\n'ltrif;':'\\u25c2',\n'ltrPar;':'\\u2996',\n'lurdshar;':'\\u294a',\n'luruhar;':'\\u2966',\n'lvertneqq;':'\\u2268\\ufe00',\n'lvnE;':'\\u2268\\ufe00',\n'macr':'\\xaf',\n'macr;':'\\xaf',\n'male;':'\\u2642',\n'malt;':'\\u2720',\n'maltese;':'\\u2720',\n'Map;':'\\u2905',\n'map;':'\\u21a6',\n'mapsto;':'\\u21a6',\n'mapstodown;':'\\u2
1a7',\n'mapstoleft;':'\\u21a4',\n'mapstoup;':'\\u21a5',\n'marker;':'\\u25ae',\n'mcomma;':'\\u2a29',\n'Mcy;':'\\u041c',\n'mcy;':'\\u043c',\n'mdash;':'\\u2014',\n'mDDot;':'\\u223a',\n'measuredangle;':'\\u2221',\n'MediumSpace;':'\\u205f',\n'Mellintrf;':'\\u2133',\n'Mfr;':'\\U0001d510',\n'mfr;':'\\U0001d52a',\n'mho;':'\\u2127',\n'micro':'\\xb5',\n'micro;':'\\xb5',\n'mid;':'\\u2223',\n'midast;':'*',\n'midcir;':'\\u2af0',\n'middot':'\\xb7',\n'middot;':'\\xb7',\n'minus;':'\\u2212',\n'minusb;':'\\u229f',\n'minusd;':'\\u2238',\n'minusdu;':'\\u2a2a',\n'MinusPlus;':'\\u2213',\n'mlcp;':'\\u2adb',\n'mldr;':'\\u2026',\n'mnplus;':'\\u2213',\n'models;':'\\u22a7',\n'Mopf;':'\\U0001d544',\n'mopf;':'\\U0001d55e',\n'mp;':'\\u2213',\n'Mscr;':'\\u2133',\n'mscr;':'\\U0001d4c2',\n'mstpos;':'\\u223e',\n'Mu;':'\\u039c',\n'mu;':'\\u03bc',\n'multimap;':'\\u22b8',\n'mumap;':'\\u22b8',\n'nabla;':'\\u2207',\n'Nacute;':'\\u0143',\n'nacute;':'\\u0144',\n'nang;':'\\u2220\\u20d2',\n'nap;':'\\u2249',\n'napE;':'\\u2a70\\u0338',\n'napid;':'\\u224b\\u0338',\n'napos;':'\\u0149',\n'napprox;':'\\u2249',\n'natur;':'\\u266e',\n'natural;':'\\u266e',\n'naturals;':'\\u2115',\n'nbsp':'\\xa0',\n'nbsp;':'\\xa0',\n'nbump;':'\\u224e\\u0338',\n'nbumpe;':'\\u224f\\u0338',\n'ncap;':'\\u2a43',\n'Ncaron;':'\\u0147',\n'ncaron;':'\\u0148',\n'Ncedil;':'\\u0145',\n'ncedil;':'\\u0146',\n'ncong;':'\\u2247',\n'ncongdot;':'\\u2a6d\\u0338',\n'ncup;':'\\u2a42',\n'Ncy;':'\\u041d',\n'ncy;':'\\u043d',\n'ndash;':'\\u2013',\n'ne;':'\\u2260',\n'nearhk;':'\\u2924',\n'neArr;':'\\u21d7',\n'nearr;':'\\u2197',\n'nearrow;':'\\u2197',\n'nedot;':'\\u2250\\u0338',\n'NegativeMediumSpace;':'\\u200b',\n'NegativeThickSpace;':'\\u200b',\n'NegativeThinSpace;':'\\u200b',\n'NegativeVeryThinSpace;':'\\u200b',\n'nequiv;':'\\u2262',\n'nesear;':'\\u2928',\n'nesim;':'\\u2242\\u0338',\n'NestedGreaterGreater;':'\\u226b',\n'NestedLessLess;':'\\u226a',\n'NewLine;':'\\n',\n'nexist;':'\\u2204',\n'nexists;':'\\u2204',\n'Nfr;':'\\U0001d511',\n'nfr;':'\\U0001d52b',\n'ngE;':'\\u2267\\u0338',\n'nge;':'\\u2271',\n'ngeq;':'\\u2271',\n'ngeqq;':'\\u2267\\u0338',\n'ngeqslant;':'\\u2a7e\\u0338',\n'nges;':'\\u2a7e\\u0338',\n'nGg;':'\\u22d9\\u0338',\n'ngsim;':'\\u2275',\n'nGt;':'\\u226b\\u20d2',\n'ngt;':'\\u226f',\n'ngtr;':'\\u226f',\n'nGtv;':'\\u226b\\u0338',\n'nhArr;':'\\u21ce',\n'nharr;':'\\u21ae',\n'nhpar;':'\\u2af2',\n'ni;':'\\u220b',\n'nis;':'\\u22fc',\n'nisd;':'\\u22fa',\n'niv;':'\\u220b',\n'NJcy;':'\\u040a',\n'njcy;':'\\u045a',\n'nlArr;':'\\u21cd',\n'nlarr;':'\\u219a',\n'nldr;':'\\u2025',\n'nlE;':'\\u2266\\u0338',\n'nle;':'\\u2270',\n'nLeftarrow;':'\\u21cd',\n'nleftarrow;':'\\u219a',\n'nLeftrightarrow;':'\\u21ce',\n'nleftrightarrow;':'\\u21ae',\n'nleq;':'\\u2270',\n'nleqq;':'\\u2266\\u0338',\n'nleqslant;':'\\u2a7d\\u0338',\n'nles;':'\\u2a7d\\u0338',\n'nless;':'\\u226e',\n'nLl;':'\\u22d8\\u0338',\n'nlsim;':'\\u2274',\n'nLt;':'\\u226a\\u20d2',\n'nlt;':'\\u226e',\n'nltri;':'\\u22ea',\n'nltrie;':'\\u22ec',\n'nLtv;':'\\u226a\\u0338',\n'nmid;':'\\u2224',\n'NoBreak;':'\\u2060',\n'NonBreakingSpace;':'\\xa0',\n'Nopf;':'\\u2115',\n'nopf;':'\\U0001d55f',\n'not':'\\xac',\n'Not;':'\\u2aec',\n'not;':'\\xac',\n'NotCongruent;':'\\u2262',\n'NotCupCap;':'\\u226d',\n'NotDoubleVerticalBar;':'\\u2226',\n'NotElement;':'\\u2209',\n'NotEqual;':'\\u2260',\n'NotEqualTilde;':'\\u2242\\u0338',\n'NotExists;':'\\u2204',\n'NotGreater;':'\\u226f',\n'NotGreaterEqual;':'\\u2271',\n'NotGreaterFullEqual;':'\\u2267\\u0338',\n'NotGreaterGreater;':'\\u226b\\u0338',\n'NotGreaterLess;':'\\u2279',\n'NotGreaterSlantEqual;':'\\u2a7e\\u03
38',\n'NotGreaterTilde;':'\\u2275',\n'NotHumpDownHump;':'\\u224e\\u0338',\n'NotHumpEqual;':'\\u224f\\u0338',\n'notin;':'\\u2209',\n'notindot;':'\\u22f5\\u0338',\n'notinE;':'\\u22f9\\u0338',\n'notinva;':'\\u2209',\n'notinvb;':'\\u22f7',\n'notinvc;':'\\u22f6',\n'NotLeftTriangle;':'\\u22ea',\n'NotLeftTriangleBar;':'\\u29cf\\u0338',\n'NotLeftTriangleEqual;':'\\u22ec',\n'NotLess;':'\\u226e',\n'NotLessEqual;':'\\u2270',\n'NotLessGreater;':'\\u2278',\n'NotLessLess;':'\\u226a\\u0338',\n'NotLessSlantEqual;':'\\u2a7d\\u0338',\n'NotLessTilde;':'\\u2274',\n'NotNestedGreaterGreater;':'\\u2aa2\\u0338',\n'NotNestedLessLess;':'\\u2aa1\\u0338',\n'notni;':'\\u220c',\n'notniva;':'\\u220c',\n'notnivb;':'\\u22fe',\n'notnivc;':'\\u22fd',\n'NotPrecedes;':'\\u2280',\n'NotPrecedesEqual;':'\\u2aaf\\u0338',\n'NotPrecedesSlantEqual;':'\\u22e0',\n'NotReverseElement;':'\\u220c',\n'NotRightTriangle;':'\\u22eb',\n'NotRightTriangleBar;':'\\u29d0\\u0338',\n'NotRightTriangleEqual;':'\\u22ed',\n'NotSquareSubset;':'\\u228f\\u0338',\n'NotSquareSubsetEqual;':'\\u22e2',\n'NotSquareSuperset;':'\\u2290\\u0338',\n'NotSquareSupersetEqual;':'\\u22e3',\n'NotSubset;':'\\u2282\\u20d2',\n'NotSubsetEqual;':'\\u2288',\n'NotSucceeds;':'\\u2281',\n'NotSucceedsEqual;':'\\u2ab0\\u0338',\n'NotSucceedsSlantEqual;':'\\u22e1',\n'NotSucceedsTilde;':'\\u227f\\u0338',\n'NotSuperset;':'\\u2283\\u20d2',\n'NotSupersetEqual;':'\\u2289',\n'NotTilde;':'\\u2241',\n'NotTildeEqual;':'\\u2244',\n'NotTildeFullEqual;':'\\u2247',\n'NotTildeTilde;':'\\u2249',\n'NotVerticalBar;':'\\u2224',\n'npar;':'\\u2226',\n'nparallel;':'\\u2226',\n'nparsl;':'\\u2afd\\u20e5',\n'npart;':'\\u2202\\u0338',\n'npolint;':'\\u2a14',\n'npr;':'\\u2280',\n'nprcue;':'\\u22e0',\n'npre;':'\\u2aaf\\u0338',\n'nprec;':'\\u2280',\n'npreceq;':'\\u2aaf\\u0338',\n'nrArr;':'\\u21cf',\n'nrarr;':'\\u219b',\n'nrarrc;':'\\u2933\\u0338',\n'nrarrw;':'\\u219d\\u0338',\n'nRightarrow;':'\\u21cf',\n'nrightarrow;':'\\u219b',\n'nrtri;':'\\u22eb',\n'nrtrie;':'\\u22ed',\n'nsc;':'\\u2281',\n'nsccue;':'\\u22e1',\n'nsce;':'\\u2ab0\\u0338',\n'Nscr;':'\\U0001d4a9',\n'nscr;':'\\U0001d4c3',\n'nshortmid;':'\\u2224',\n'nshortparallel;':'\\u2226',\n'nsim;':'\\u2241',\n'nsime;':'\\u2244',\n'nsimeq;':'\\u2244',\n'nsmid;':'\\u2224',\n'nspar;':'\\u2226',\n'nsqsube;':'\\u22e2',\n'nsqsupe;':'\\u22e3',\n'nsub;':'\\u2284',\n'nsubE;':'\\u2ac5\\u0338',\n'nsube;':'\\u2288',\n'nsubset;':'\\u2282\\u20d2',\n'nsubseteq;':'\\u2288',\n'nsubseteqq;':'\\u2ac5\\u0338',\n'nsucc;':'\\u2281',\n'nsucceq;':'\\u2ab0\\u0338',\n'nsup;':'\\u2285',\n'nsupE;':'\\u2ac6\\u0338',\n'nsupe;':'\\u2289',\n'nsupset;':'\\u2283\\u20d2',\n'nsupseteq;':'\\u2289',\n'nsupseteqq;':'\\u2ac6\\u0338',\n'ntgl;':'\\u2279',\n'Ntilde':'\\xd1',\n'ntilde':'\\xf1',\n'Ntilde;':'\\xd1',\n'ntilde;':'\\xf1',\n'ntlg;':'\\u2278',\n'ntriangleleft;':'\\u22ea',\n'ntrianglelefteq;':'\\u22ec',\n'ntriangleright;':'\\u22eb',\n'ntrianglerighteq;':'\\u22ed',\n'Nu;':'\\u039d',\n'nu;':'\\u03bd',\n'num;':'#',\n'numero;':'\\u2116',\n'numsp;':'\\u2007',\n'nvap;':'\\u224d\\u20d2',\n'nVDash;':'\\u22af',\n'nVdash;':'\\u22ae',\n'nvDash;':'\\u22ad',\n'nvdash;':'\\u22ac',\n'nvge;':'\\u2265\\u20d2',\n'nvgt;':'>\\u20d2',\n'nvHarr;':'\\u2904',\n'nvinfin;':'\\u29de',\n'nvlArr;':'\\u2902',\n'nvle;':'\\u2264\\u20d2',\n'nvlt;':'<\\u20d2',\n'nvltrie;':'\\u22b4\\u20d2',\n'nvrArr;':'\\u2903',\n'nvrtrie;':'\\u22b5\\u20d2',\n'nvsim;':'\\u223c\\u20d2',\n'nwarhk;':'\\u2923',\n'nwArr;':'\\u21d6',\n'nwarr;':'\\u2196',\n'nwarrow;':'\\u2196',\n'nwnear;':'\\u2927',\n'Oacute':'\\xd3',\n'oacute':'\\xf3',\n'Oacute;':'\\xd3',\
n'oacute;':'\\xf3',\n'oast;':'\\u229b',\n'ocir;':'\\u229a',\n'Ocirc':'\\xd4',\n'ocirc':'\\xf4',\n'Ocirc;':'\\xd4',\n'ocirc;':'\\xf4',\n'Ocy;':'\\u041e',\n'ocy;':'\\u043e',\n'odash;':'\\u229d',\n'Odblac;':'\\u0150',\n'odblac;':'\\u0151',\n'odiv;':'\\u2a38',\n'odot;':'\\u2299',\n'odsold;':'\\u29bc',\n'OElig;':'\\u0152',\n'oelig;':'\\u0153',\n'ofcir;':'\\u29bf',\n'Ofr;':'\\U0001d512',\n'ofr;':'\\U0001d52c',\n'ogon;':'\\u02db',\n'Ograve':'\\xd2',\n'ograve':'\\xf2',\n'Ograve;':'\\xd2',\n'ograve;':'\\xf2',\n'ogt;':'\\u29c1',\n'ohbar;':'\\u29b5',\n'ohm;':'\\u03a9',\n'oint;':'\\u222e',\n'olarr;':'\\u21ba',\n'olcir;':'\\u29be',\n'olcross;':'\\u29bb',\n'oline;':'\\u203e',\n'olt;':'\\u29c0',\n'Omacr;':'\\u014c',\n'omacr;':'\\u014d',\n'Omega;':'\\u03a9',\n'omega;':'\\u03c9',\n'Omicron;':'\\u039f',\n'omicron;':'\\u03bf',\n'omid;':'\\u29b6',\n'ominus;':'\\u2296',\n'Oopf;':'\\U0001d546',\n'oopf;':'\\U0001d560',\n'opar;':'\\u29b7',\n'OpenCurlyDoubleQuote;':'\\u201c',\n'OpenCurlyQuote;':'\\u2018',\n'operp;':'\\u29b9',\n'oplus;':'\\u2295',\n'Or;':'\\u2a54',\n'or;':'\\u2228',\n'orarr;':'\\u21bb',\n'ord;':'\\u2a5d',\n'order;':'\\u2134',\n'orderof;':'\\u2134',\n'ordf':'\\xaa',\n'ordf;':'\\xaa',\n'ordm':'\\xba',\n'ordm;':'\\xba',\n'origof;':'\\u22b6',\n'oror;':'\\u2a56',\n'orslope;':'\\u2a57',\n'orv;':'\\u2a5b',\n'oS;':'\\u24c8',\n'Oscr;':'\\U0001d4aa',\n'oscr;':'\\u2134',\n'Oslash':'\\xd8',\n'oslash':'\\xf8',\n'Oslash;':'\\xd8',\n'oslash;':'\\xf8',\n'osol;':'\\u2298',\n'Otilde':'\\xd5',\n'otilde':'\\xf5',\n'Otilde;':'\\xd5',\n'otilde;':'\\xf5',\n'Otimes;':'\\u2a37',\n'otimes;':'\\u2297',\n'otimesas;':'\\u2a36',\n'Ouml':'\\xd6',\n'ouml':'\\xf6',\n'Ouml;':'\\xd6',\n'ouml;':'\\xf6',\n'ovbar;':'\\u233d',\n'OverBar;':'\\u203e',\n'OverBrace;':'\\u23de',\n'OverBracket;':'\\u23b4',\n'OverParenthesis;':'\\u23dc',\n'par;':'\\u2225',\n'para':'\\xb6',\n'para;':'\\xb6',\n'parallel;':'\\u2225',\n'parsim;':'\\u2af3',\n'parsl;':'\\u2afd',\n'part;':'\\u2202',\n'PartialD;':'\\u2202',\n'Pcy;':'\\u041f',\n'pcy;':'\\u043f',\n'percnt;':'%',\n'period;':'.',\n'permil;':'\\u2030',\n'perp;':'\\u22a5',\n'pertenk;':'\\u2031',\n'Pfr;':'\\U0001d513',\n'pfr;':'\\U0001d52d',\n'Phi;':'\\u03a6',\n'phi;':'\\u03c6',\n'phiv;':'\\u03d5',\n'phmmat;':'\\u2133',\n'phone;':'\\u260e',\n'Pi;':'\\u03a0',\n'pi;':'\\u03c0',\n'pitchfork;':'\\u22d4',\n'piv;':'\\u03d6',\n'planck;':'\\u210f',\n'planckh;':'\\u210e',\n'plankv;':'\\u210f',\n'plus;':'+',\n'plusacir;':'\\u2a23',\n'plusb;':'\\u229e',\n'pluscir;':'\\u2a22',\n'plusdo;':'\\u2214',\n'plusdu;':'\\u2a25',\n'pluse;':'\\u2a72',\n'PlusMinus;':'\\xb1',\n'plusmn':'\\xb1',\n'plusmn;':'\\xb1',\n'plussim;':'\\u2a26',\n'plustwo;':'\\u2a27',\n'pm;':'\\xb1',\n'Poincareplane;':'\\u210c',\n'pointint;':'\\u2a15',\n'Popf;':'\\u2119',\n'popf;':'\\U0001d561',\n'pound':'\\xa3',\n'pound;':'\\xa3',\n'Pr;':'\\u2abb',\n'pr;':'\\u227a',\n'prap;':'\\u2ab7',\n'prcue;':'\\u227c',\n'prE;':'\\u2ab3',\n'pre;':'\\u2aaf',\n'prec;':'\\u227a',\n'precapprox;':'\\u2ab7',\n'preccurlyeq;':'\\u227c',\n'Precedes;':'\\u227a',\n'PrecedesEqual;':'\\u2aaf',\n'PrecedesSlantEqual;':'\\u227c',\n'PrecedesTilde;':'\\u227e',\n'preceq;':'\\u2aaf',\n'precnapprox;':'\\u2ab9',\n'precneqq;':'\\u2ab5',\n'precnsim;':'\\u22e8',\n'precsim;':'\\u227e',\n'Prime;':'\\u2033',\n'prime;':'\\u2032',\n'primes;':'\\u2119',\n'prnap;':'\\u2ab9',\n'prnE;':'\\u2ab5',\n'prnsim;':'\\u22e8',\n'prod;':'\\u220f',\n'Product;':'\\u220f',\n'profalar;':'\\u232e',\n'profline;':'\\u2312',\n'profsurf;':'\\u2313',\n'prop;':'\\u221d',\n'Proportion;':'\\u2237',\n'Proportional;':'\\u221d',\n
'propto;':'\\u221d',\n'prsim;':'\\u227e',\n'prurel;':'\\u22b0',\n'Pscr;':'\\U0001d4ab',\n'pscr;':'\\U0001d4c5',\n'Psi;':'\\u03a8',\n'psi;':'\\u03c8',\n'puncsp;':'\\u2008',\n'Qfr;':'\\U0001d514',\n'qfr;':'\\U0001d52e',\n'qint;':'\\u2a0c',\n'Qopf;':'\\u211a',\n'qopf;':'\\U0001d562',\n'qprime;':'\\u2057',\n'Qscr;':'\\U0001d4ac',\n'qscr;':'\\U0001d4c6',\n'quaternions;':'\\u210d',\n'quatint;':'\\u2a16',\n'quest;':'?',\n'questeq;':'\\u225f',\n'QUOT':'\"',\n'quot':'\"',\n'QUOT;':'\"',\n'quot;':'\"',\n'rAarr;':'\\u21db',\n'race;':'\\u223d\\u0331',\n'Racute;':'\\u0154',\n'racute;':'\\u0155',\n'radic;':'\\u221a',\n'raemptyv;':'\\u29b3',\n'Rang;':'\\u27eb',\n'rang;':'\\u27e9',\n'rangd;':'\\u2992',\n'range;':'\\u29a5',\n'rangle;':'\\u27e9',\n'raquo':'\\xbb',\n'raquo;':'\\xbb',\n'Rarr;':'\\u21a0',\n'rArr;':'\\u21d2',\n'rarr;':'\\u2192',\n'rarrap;':'\\u2975',\n'rarrb;':'\\u21e5',\n'rarrbfs;':'\\u2920',\n'rarrc;':'\\u2933',\n'rarrfs;':'\\u291e',\n'rarrhk;':'\\u21aa',\n'rarrlp;':'\\u21ac',\n'rarrpl;':'\\u2945',\n'rarrsim;':'\\u2974',\n'Rarrtl;':'\\u2916',\n'rarrtl;':'\\u21a3',\n'rarrw;':'\\u219d',\n'rAtail;':'\\u291c',\n'ratail;':'\\u291a',\n'ratio;':'\\u2236',\n'rationals;':'\\u211a',\n'RBarr;':'\\u2910',\n'rBarr;':'\\u290f',\n'rbarr;':'\\u290d',\n'rbbrk;':'\\u2773',\n'rbrace;':'}',\n'rbrack;':']',\n'rbrke;':'\\u298c',\n'rbrksld;':'\\u298e',\n'rbrkslu;':'\\u2990',\n'Rcaron;':'\\u0158',\n'rcaron;':'\\u0159',\n'Rcedil;':'\\u0156',\n'rcedil;':'\\u0157',\n'rceil;':'\\u2309',\n'rcub;':'}',\n'Rcy;':'\\u0420',\n'rcy;':'\\u0440',\n'rdca;':'\\u2937',\n'rdldhar;':'\\u2969',\n'rdquo;':'\\u201d',\n'rdquor;':'\\u201d',\n'rdsh;':'\\u21b3',\n'Re;':'\\u211c',\n'real;':'\\u211c',\n'realine;':'\\u211b',\n'realpart;':'\\u211c',\n'reals;':'\\u211d',\n'rect;':'\\u25ad',\n'REG':'\\xae',\n'reg':'\\xae',\n'REG;':'\\xae',\n'reg;':'\\xae',\n'ReverseElement;':'\\u220b',\n'ReverseEquilibrium;':'\\u21cb',\n'ReverseUpEquilibrium;':'\\u296f',\n'rfisht;':'\\u297d',\n'rfloor;':'\\u230b',\n'Rfr;':'\\u211c',\n'rfr;':'\\U0001d52f',\n'rHar;':'\\u2964',\n'rhard;':'\\u21c1',\n'rharu;':'\\u21c0',\n'rharul;':'\\u296c',\n'Rho;':'\\u03a1',\n'rho;':'\\u03c1',\n'rhov;':'\\u03f1',\n'RightAngleBracket;':'\\u27e9',\n'RightArrow;':'\\u2192',\n'Rightarrow;':'\\u21d2',\n'rightarrow;':'\\u2192',\n'RightArrowBar;':'\\u21e5',\n'RightArrowLeftArrow;':'\\u21c4',\n'rightarrowtail;':'\\u21a3',\n'RightCeiling;':'\\u2309',\n'RightDoubleBracket;':'\\u27e7',\n'RightDownTeeVector;':'\\u295d',\n'RightDownVector;':'\\u21c2',\n'RightDownVectorBar;':'\\u2955',\n'RightFloor;':'\\u230b',\n'rightharpoondown;':'\\u21c1',\n'rightharpoonup;':'\\u21c0',\n'rightleftarrows;':'\\u21c4',\n'rightleftharpoons;':'\\u21cc',\n'rightrightarrows;':'\\u21c9',\n'rightsquigarrow;':'\\u219d',\n'RightTee;':'\\u22a2',\n'RightTeeArrow;':'\\u21a6',\n'RightTeeVector;':'\\u295b',\n'rightthreetimes;':'\\u22cc',\n'RightTriangle;':'\\u22b3',\n'RightTriangleBar;':'\\u29d0',\n'RightTriangleEqual;':'\\u22b5',\n'RightUpDownVector;':'\\u294f',\n'RightUpTeeVector;':'\\u295c',\n'RightUpVector;':'\\u21be',\n'RightUpVectorBar;':'\\u2954',\n'RightVector;':'\\u21c0',\n'RightVectorBar;':'\\u2953',\n'ring;':'\\u02da',\n'risingdotseq;':'\\u2253',\n'rlarr;':'\\u21c4',\n'rlhar;':'\\u21cc',\n'rlm;':'\\u200f',\n'rmoust;':'\\u23b1',\n'rmoustache;':'\\u23b1',\n'rnmid;':'\\u2aee',\n'roang;':'\\u27ed',\n'roarr;':'\\u21fe',\n'robrk;':'\\u27e7',\n'ropar;':'\\u2986',\n'Ropf;':'\\u211d',\n'ropf;':'\\U0001d563',\n'roplus;':'\\u2a2e',\n'rotimes;':'\\u2a35',\n'RoundImplies;':'\\u2970',\n'rpar;':')',\n'rpargt;':'\\u2994',\n'rppoli
nt;':'\\u2a12',\n'rrarr;':'\\u21c9',\n'Rrightarrow;':'\\u21db',\n'rsaquo;':'\\u203a',\n'Rscr;':'\\u211b',\n'rscr;':'\\U0001d4c7',\n'Rsh;':'\\u21b1',\n'rsh;':'\\u21b1',\n'rsqb;':']',\n'rsquo;':'\\u2019',\n'rsquor;':'\\u2019',\n'rthree;':'\\u22cc',\n'rtimes;':'\\u22ca',\n'rtri;':'\\u25b9',\n'rtrie;':'\\u22b5',\n'rtrif;':'\\u25b8',\n'rtriltri;':'\\u29ce',\n'RuleDelayed;':'\\u29f4',\n'ruluhar;':'\\u2968',\n'rx;':'\\u211e',\n'Sacute;':'\\u015a',\n'sacute;':'\\u015b',\n'sbquo;':'\\u201a',\n'Sc;':'\\u2abc',\n'sc;':'\\u227b',\n'scap;':'\\u2ab8',\n'Scaron;':'\\u0160',\n'scaron;':'\\u0161',\n'sccue;':'\\u227d',\n'scE;':'\\u2ab4',\n'sce;':'\\u2ab0',\n'Scedil;':'\\u015e',\n'scedil;':'\\u015f',\n'Scirc;':'\\u015c',\n'scirc;':'\\u015d',\n'scnap;':'\\u2aba',\n'scnE;':'\\u2ab6',\n'scnsim;':'\\u22e9',\n'scpolint;':'\\u2a13',\n'scsim;':'\\u227f',\n'Scy;':'\\u0421',\n'scy;':'\\u0441',\n'sdot;':'\\u22c5',\n'sdotb;':'\\u22a1',\n'sdote;':'\\u2a66',\n'searhk;':'\\u2925',\n'seArr;':'\\u21d8',\n'searr;':'\\u2198',\n'searrow;':'\\u2198',\n'sect':'\\xa7',\n'sect;':'\\xa7',\n'semi;':';',\n'seswar;':'\\u2929',\n'setminus;':'\\u2216',\n'setmn;':'\\u2216',\n'sext;':'\\u2736',\n'Sfr;':'\\U0001d516',\n'sfr;':'\\U0001d530',\n'sfrown;':'\\u2322',\n'sharp;':'\\u266f',\n'SHCHcy;':'\\u0429',\n'shchcy;':'\\u0449',\n'SHcy;':'\\u0428',\n'shcy;':'\\u0448',\n'ShortDownArrow;':'\\u2193',\n'ShortLeftArrow;':'\\u2190',\n'shortmid;':'\\u2223',\n'shortparallel;':'\\u2225',\n'ShortRightArrow;':'\\u2192',\n'ShortUpArrow;':'\\u2191',\n'shy':'\\xad',\n'shy;':'\\xad',\n'Sigma;':'\\u03a3',\n'sigma;':'\\u03c3',\n'sigmaf;':'\\u03c2',\n'sigmav;':'\\u03c2',\n'sim;':'\\u223c',\n'simdot;':'\\u2a6a',\n'sime;':'\\u2243',\n'simeq;':'\\u2243',\n'simg;':'\\u2a9e',\n'simgE;':'\\u2aa0',\n'siml;':'\\u2a9d',\n'simlE;':'\\u2a9f',\n'simne;':'\\u2246',\n'simplus;':'\\u2a24',\n'simrarr;':'\\u2972',\n'slarr;':'\\u2190',\n'SmallCircle;':'\\u2218',\n'smallsetminus;':'\\u2216',\n'smashp;':'\\u2a33',\n'smeparsl;':'\\u29e4',\n'smid;':'\\u2223',\n'smile;':'\\u2323',\n'smt;':'\\u2aaa',\n'smte;':'\\u2aac',\n'smtes;':'\\u2aac\\ufe00',\n'SOFTcy;':'\\u042c',\n'softcy;':'\\u044c',\n'sol;':'/',\n'solb;':'\\u29c4',\n'solbar;':'\\u233f',\n'Sopf;':'\\U0001d54a',\n'sopf;':'\\U0001d564',\n'spades;':'\\u2660',\n'spadesuit;':'\\u2660',\n'spar;':'\\u2225',\n'sqcap;':'\\u2293',\n'sqcaps;':'\\u2293\\ufe00',\n'sqcup;':'\\u2294',\n'sqcups;':'\\u2294\\ufe00',\n'Sqrt;':'\\u221a',\n'sqsub;':'\\u228f',\n'sqsube;':'\\u2291',\n'sqsubset;':'\\u228f',\n'sqsubseteq;':'\\u2291',\n'sqsup;':'\\u2290',\n'sqsupe;':'\\u2292',\n'sqsupset;':'\\u2290',\n'sqsupseteq;':'\\u2292',\n'squ;':'\\u25a1',\n'Square;':'\\u25a1',\n'square;':'\\u25a1',\n'SquareIntersection;':'\\u2293',\n'SquareSubset;':'\\u228f',\n'SquareSubsetEqual;':'\\u2291',\n'SquareSuperset;':'\\u2290',\n'SquareSupersetEqual;':'\\u2292',\n'SquareUnion;':'\\u2294',\n'squarf;':'\\u25aa',\n'squf;':'\\u25aa',\n'srarr;':'\\u2192',\n'Sscr;':'\\U0001d4ae',\n'sscr;':'\\U0001d4c8',\n'ssetmn;':'\\u2216',\n'ssmile;':'\\u2323',\n'sstarf;':'\\u22c6',\n'Star;':'\\u22c6',\n'star;':'\\u2606',\n'starf;':'\\u2605',\n'straightepsilon;':'\\u03f5',\n'straightphi;':'\\u03d5',\n'strns;':'\\xaf',\n'Sub;':'\\u22d0',\n'sub;':'\\u2282',\n'subdot;':'\\u2abd',\n'subE;':'\\u2ac5',\n'sube;':'\\u2286',\n'subedot;':'\\u2ac3',\n'submult;':'\\u2ac1',\n'subnE;':'\\u2acb',\n'subne;':'\\u228a',\n'subplus;':'\\u2abf',\n'subrarr;':'\\u2979',\n'Subset;':'\\u22d0',\n'subset;':'\\u2282',\n'subseteq;':'\\u2286',\n'subseteqq;':'\\u2ac5',\n'SubsetEqual;':'\\u2286',\n'subsetneq;':'\\u228a',\n
'subsetneqq;':'\\u2acb',\n'subsim;':'\\u2ac7',\n'subsub;':'\\u2ad5',\n'subsup;':'\\u2ad3',\n'succ;':'\\u227b',\n'succapprox;':'\\u2ab8',\n'succcurlyeq;':'\\u227d',\n'Succeeds;':'\\u227b',\n'SucceedsEqual;':'\\u2ab0',\n'SucceedsSlantEqual;':'\\u227d',\n'SucceedsTilde;':'\\u227f',\n'succeq;':'\\u2ab0',\n'succnapprox;':'\\u2aba',\n'succneqq;':'\\u2ab6',\n'succnsim;':'\\u22e9',\n'succsim;':'\\u227f',\n'SuchThat;':'\\u220b',\n'Sum;':'\\u2211',\n'sum;':'\\u2211',\n'sung;':'\\u266a',\n'sup1':'\\xb9',\n'sup1;':'\\xb9',\n'sup2':'\\xb2',\n'sup2;':'\\xb2',\n'sup3':'\\xb3',\n'sup3;':'\\xb3',\n'Sup;':'\\u22d1',\n'sup;':'\\u2283',\n'supdot;':'\\u2abe',\n'supdsub;':'\\u2ad8',\n'supE;':'\\u2ac6',\n'supe;':'\\u2287',\n'supedot;':'\\u2ac4',\n'Superset;':'\\u2283',\n'SupersetEqual;':'\\u2287',\n'suphsol;':'\\u27c9',\n'suphsub;':'\\u2ad7',\n'suplarr;':'\\u297b',\n'supmult;':'\\u2ac2',\n'supnE;':'\\u2acc',\n'supne;':'\\u228b',\n'supplus;':'\\u2ac0',\n'Supset;':'\\u22d1',\n'supset;':'\\u2283',\n'supseteq;':'\\u2287',\n'supseteqq;':'\\u2ac6',\n'supsetneq;':'\\u228b',\n'supsetneqq;':'\\u2acc',\n'supsim;':'\\u2ac8',\n'supsub;':'\\u2ad4',\n'supsup;':'\\u2ad6',\n'swarhk;':'\\u2926',\n'swArr;':'\\u21d9',\n'swarr;':'\\u2199',\n'swarrow;':'\\u2199',\n'swnwar;':'\\u292a',\n'szlig':'\\xdf',\n'szlig;':'\\xdf',\n'Tab;':'\\t',\n'target;':'\\u2316',\n'Tau;':'\\u03a4',\n'tau;':'\\u03c4',\n'tbrk;':'\\u23b4',\n'Tcaron;':'\\u0164',\n'tcaron;':'\\u0165',\n'Tcedil;':'\\u0162',\n'tcedil;':'\\u0163',\n'Tcy;':'\\u0422',\n'tcy;':'\\u0442',\n'tdot;':'\\u20db',\n'telrec;':'\\u2315',\n'Tfr;':'\\U0001d517',\n'tfr;':'\\U0001d531',\n'there4;':'\\u2234',\n'Therefore;':'\\u2234',\n'therefore;':'\\u2234',\n'Theta;':'\\u0398',\n'theta;':'\\u03b8',\n'thetasym;':'\\u03d1',\n'thetav;':'\\u03d1',\n'thickapprox;':'\\u2248',\n'thicksim;':'\\u223c',\n'ThickSpace;':'\\u205f\\u200a',\n'thinsp;':'\\u2009',\n'ThinSpace;':'\\u2009',\n'thkap;':'\\u2248',\n'thksim;':'\\u223c',\n'THORN':'\\xde',\n'thorn':'\\xfe',\n'THORN;':'\\xde',\n'thorn;':'\\xfe',\n'Tilde;':'\\u223c',\n'tilde;':'\\u02dc',\n'TildeEqual;':'\\u2243',\n'TildeFullEqual;':'\\u2245',\n'TildeTilde;':'\\u2248',\n'times':'\\xd7',\n'times;':'\\xd7',\n'timesb;':'\\u22a0',\n'timesbar;':'\\u2a31',\n'timesd;':'\\u2a30',\n'tint;':'\\u222d',\n'toea;':'\\u2928',\n'top;':'\\u22a4',\n'topbot;':'\\u2336',\n'topcir;':'\\u2af1',\n'Topf;':'\\U0001d54b',\n'topf;':'\\U0001d565',\n'topfork;':'\\u2ada',\n'tosa;':'\\u2929',\n'tprime;':'\\u2034',\n'TRADE;':'\\u2122',\n'trade;':'\\u2122',\n'triangle;':'\\u25b5',\n'triangledown;':'\\u25bf',\n'triangleleft;':'\\u25c3',\n'trianglelefteq;':'\\u22b4',\n'triangleq;':'\\u225c',\n'triangleright;':'\\u25b9',\n'trianglerighteq;':'\\u22b5',\n'tridot;':'\\u25ec',\n'trie;':'\\u225c',\n'triminus;':'\\u2a3a',\n'TripleDot;':'\\u20db',\n'triplus;':'\\u2a39',\n'trisb;':'\\u29cd',\n'tritime;':'\\u2a3b',\n'trpezium;':'\\u23e2',\n'Tscr;':'\\U0001d4af',\n'tscr;':'\\U0001d4c9',\n'TScy;':'\\u0426',\n'tscy;':'\\u0446',\n'TSHcy;':'\\u040b',\n'tshcy;':'\\u045b',\n'Tstrok;':'\\u0166',\n'tstrok;':'\\u0167',\n'twixt;':'\\u226c',\n'twoheadleftarrow;':'\\u219e',\n'twoheadrightarrow;':'\\u21a0',\n'Uacute':'\\xda',\n'uacute':'\\xfa',\n'Uacute;':'\\xda',\n'uacute;':'\\xfa',\n'Uarr;':'\\u219f',\n'uArr;':'\\u21d1',\n'uarr;':'\\u2191',\n'Uarrocir;':'\\u2949',\n'Ubrcy;':'\\u040e',\n'ubrcy;':'\\u045e',\n'Ubreve;':'\\u016c',\n'ubreve;':'\\u016d',\n'Ucirc':'\\xdb',\n'ucirc':'\\xfb',\n'Ucirc;':'\\xdb',\n'ucirc;':'\\xfb',\n'Ucy;':'\\u0423',\n'ucy;':'\\u0443',\n'udarr;':'\\u21c5',\n'Udblac;':'\\u0170',\n'udblac;':'
\\u0171',\n'udhar;':'\\u296e',\n'ufisht;':'\\u297e',\n'Ufr;':'\\U0001d518',\n'ufr;':'\\U0001d532',\n'Ugrave':'\\xd9',\n'ugrave':'\\xf9',\n'Ugrave;':'\\xd9',\n'ugrave;':'\\xf9',\n'uHar;':'\\u2963',\n'uharl;':'\\u21bf',\n'uharr;':'\\u21be',\n'uhblk;':'\\u2580',\n'ulcorn;':'\\u231c',\n'ulcorner;':'\\u231c',\n'ulcrop;':'\\u230f',\n'ultri;':'\\u25f8',\n'Umacr;':'\\u016a',\n'umacr;':'\\u016b',\n'uml':'\\xa8',\n'uml;':'\\xa8',\n'UnderBar;':'_',\n'UnderBrace;':'\\u23df',\n'UnderBracket;':'\\u23b5',\n'UnderParenthesis;':'\\u23dd',\n'Union;':'\\u22c3',\n'UnionPlus;':'\\u228e',\n'Uogon;':'\\u0172',\n'uogon;':'\\u0173',\n'Uopf;':'\\U0001d54c',\n'uopf;':'\\U0001d566',\n'UpArrow;':'\\u2191',\n'Uparrow;':'\\u21d1',\n'uparrow;':'\\u2191',\n'UpArrowBar;':'\\u2912',\n'UpArrowDownArrow;':'\\u21c5',\n'UpDownArrow;':'\\u2195',\n'Updownarrow;':'\\u21d5',\n'updownarrow;':'\\u2195',\n'UpEquilibrium;':'\\u296e',\n'upharpoonleft;':'\\u21bf',\n'upharpoonright;':'\\u21be',\n'uplus;':'\\u228e',\n'UpperLeftArrow;':'\\u2196',\n'UpperRightArrow;':'\\u2197',\n'Upsi;':'\\u03d2',\n'upsi;':'\\u03c5',\n'upsih;':'\\u03d2',\n'Upsilon;':'\\u03a5',\n'upsilon;':'\\u03c5',\n'UpTee;':'\\u22a5',\n'UpTeeArrow;':'\\u21a5',\n'upuparrows;':'\\u21c8',\n'urcorn;':'\\u231d',\n'urcorner;':'\\u231d',\n'urcrop;':'\\u230e',\n'Uring;':'\\u016e',\n'uring;':'\\u016f',\n'urtri;':'\\u25f9',\n'Uscr;':'\\U0001d4b0',\n'uscr;':'\\U0001d4ca',\n'utdot;':'\\u22f0',\n'Utilde;':'\\u0168',\n'utilde;':'\\u0169',\n'utri;':'\\u25b5',\n'utrif;':'\\u25b4',\n'uuarr;':'\\u21c8',\n'Uuml':'\\xdc',\n'uuml':'\\xfc',\n'Uuml;':'\\xdc',\n'uuml;':'\\xfc',\n'uwangle;':'\\u29a7',\n'vangrt;':'\\u299c',\n'varepsilon;':'\\u03f5',\n'varkappa;':'\\u03f0',\n'varnothing;':'\\u2205',\n'varphi;':'\\u03d5',\n'varpi;':'\\u03d6',\n'varpropto;':'\\u221d',\n'vArr;':'\\u21d5',\n'varr;':'\\u2195',\n'varrho;':'\\u03f1',\n'varsigma;':'\\u03c2',\n'varsubsetneq;':'\\u228a\\ufe00',\n'varsubsetneqq;':'\\u2acb\\ufe00',\n'varsupsetneq;':'\\u228b\\ufe00',\n'varsupsetneqq;':'\\u2acc\\ufe00',\n'vartheta;':'\\u03d1',\n'vartriangleleft;':'\\u22b2',\n'vartriangleright;':'\\u22b3',\n'Vbar;':'\\u2aeb',\n'vBar;':'\\u2ae8',\n'vBarv;':'\\u2ae9',\n'Vcy;':'\\u0412',\n'vcy;':'\\u0432',\n'VDash;':'\\u22ab',\n'Vdash;':'\\u22a9',\n'vDash;':'\\u22a8',\n'vdash;':'\\u22a2',\n'Vdashl;':'\\u2ae6',\n'Vee;':'\\u22c1',\n'vee;':'\\u2228',\n'veebar;':'\\u22bb',\n'veeeq;':'\\u225a',\n'vellip;':'\\u22ee',\n'Verbar;':'\\u2016',\n'verbar;':'|',\n'Vert;':'\\u2016',\n'vert;':'|',\n'VerticalBar;':'\\u2223',\n'VerticalLine;':'|',\n'VerticalSeparator;':'\\u2758',\n'VerticalTilde;':'\\u2240',\n'VeryThinSpace;':'\\u200a',\n'Vfr;':'\\U0001d519',\n'vfr;':'\\U0001d533',\n'vltri;':'\\u22b2',\n'vnsub;':'\\u2282\\u20d2',\n'vnsup;':'\\u2283\\u20d2',\n'Vopf;':'\\U0001d54d',\n'vopf;':'\\U0001d567',\n'vprop;':'\\u221d',\n'vrtri;':'\\u22b3',\n'Vscr;':'\\U0001d4b1',\n'vscr;':'\\U0001d4cb',\n'vsubnE;':'\\u2acb\\ufe00',\n'vsubne;':'\\u228a\\ufe00',\n'vsupnE;':'\\u2acc\\ufe00',\n'vsupne;':'\\u228b\\ufe00',\n'Vvdash;':'\\u22aa',\n'vzigzag;':'\\u299a',\n'Wcirc;':'\\u0174',\n'wcirc;':'\\u0175',\n'wedbar;':'\\u2a5f',\n'Wedge;':'\\u22c0',\n'wedge;':'\\u2227',\n'wedgeq;':'\\u2259',\n'weierp;':'\\u2118',\n'Wfr;':'\\U0001d51a',\n'wfr;':'\\U0001d534',\n'Wopf;':'\\U0001d54e',\n'wopf;':'\\U0001d568',\n'wp;':'\\u2118',\n'wr;':'\\u2240',\n'wreath;':'\\u2240',\n'Wscr;':'\\U0001d4b2',\n'wscr;':'\\U0001d4cc',\n'xcap;':'\\u22c2',\n'xcirc;':'\\u25ef',\n'xcup;':'\\u22c3',\n'xdtri;':'\\u25bd',\n'Xfr;':'\\U0001d51b',\n'xfr;':'\\U0001d535',\n'xhArr;':'\\u27fa',\n'xharr;':'\
\u27f7',\n'Xi;':'\\u039e',\n'xi;':'\\u03be',\n'xlArr;':'\\u27f8',\n'xlarr;':'\\u27f5',\n'xmap;':'\\u27fc',\n'xnis;':'\\u22fb',\n'xodot;':'\\u2a00',\n'Xopf;':'\\U0001d54f',\n'xopf;':'\\U0001d569',\n'xoplus;':'\\u2a01',\n'xotime;':'\\u2a02',\n'xrArr;':'\\u27f9',\n'xrarr;':'\\u27f6',\n'Xscr;':'\\U0001d4b3',\n'xscr;':'\\U0001d4cd',\n'xsqcup;':'\\u2a06',\n'xuplus;':'\\u2a04',\n'xutri;':'\\u25b3',\n'xvee;':'\\u22c1',\n'xwedge;':'\\u22c0',\n'Yacute':'\\xdd',\n'yacute':'\\xfd',\n'Yacute;':'\\xdd',\n'yacute;':'\\xfd',\n'YAcy;':'\\u042f',\n'yacy;':'\\u044f',\n'Ycirc;':'\\u0176',\n'ycirc;':'\\u0177',\n'Ycy;':'\\u042b',\n'ycy;':'\\u044b',\n'yen':'\\xa5',\n'yen;':'\\xa5',\n'Yfr;':'\\U0001d51c',\n'yfr;':'\\U0001d536',\n'YIcy;':'\\u0407',\n'yicy;':'\\u0457',\n'Yopf;':'\\U0001d550',\n'yopf;':'\\U0001d56a',\n'Yscr;':'\\U0001d4b4',\n'yscr;':'\\U0001d4ce',\n'YUcy;':'\\u042e',\n'yucy;':'\\u044e',\n'yuml':'\\xff',\n'Yuml;':'\\u0178',\n'yuml;':'\\xff',\n'Zacute;':'\\u0179',\n'zacute;':'\\u017a',\n'Zcaron;':'\\u017d',\n'zcaron;':'\\u017e',\n'Zcy;':'\\u0417',\n'zcy;':'\\u0437',\n'Zdot;':'\\u017b',\n'zdot;':'\\u017c',\n'zeetrf;':'\\u2128',\n'ZeroWidthSpace;':'\\u200b',\n'Zeta;':'\\u0396',\n'zeta;':'\\u03b6',\n'Zfr;':'\\u2128',\n'zfr;':'\\U0001d537',\n'ZHcy;':'\\u0416',\n'zhcy;':'\\u0436',\n'zigrarr;':'\\u21dd',\n'Zopf;':'\\u2124',\n'zopf;':'\\U0001d56b',\n'Zscr;':'\\U0001d4b5',\n'zscr;':'\\U0001d4cf',\n'zwj;':'\\u200d',\n'zwnj;':'\\u200c',\n}\n\n\ncodepoint2name={}\n\n\n\nentitydefs={}\n\nfor (name,codepoint)in name2codepoint.items():\n codepoint2name[codepoint]=name\n entitydefs[name]=chr(codepoint)\n \ndel name,codepoint\n"], "argparse": [".py", "\n\n\"\"\"Command-line parsing library\n\nThis module is an optparse-inspired command-line parsing library that:\n\n - handles both optional and positional arguments\n - produces highly informative usage messages\n - supports parsers that dispatch to sub-parsers\n\nThe following is a simple usage example that sums integers from the\ncommand-line and writes the result to a file::\n\n parser = argparse.ArgumentParser(\n description='sum the integers at the command line')\n parser.add_argument(\n 'integers', metavar='int', nargs='+', type=int,\n help='an integer to be summed')\n parser.add_argument(\n '--log', default=sys.stdout, type=argparse.FileType('w'),\n help='the file where the sum should be written')\n args = parser.parse_args()\n args.log.write('%s' % sum(args.integers))\n args.log.close()\n\nThe module contains the following public classes:\n\n - ArgumentParser -- The main entry point for command-line parsing. As the\n example above shows, the add_argument() method is used to populate\n the parser with actions for optional and positional arguments. Then\n the parse_args() method is invoked to convert the args at the\n command-line into an object with attributes.\n\n - ArgumentError -- The exception raised by ArgumentParser objects when\n there are errors with the parser's actions. Errors raised while\n parsing the command-line are caught by ArgumentParser and emitted\n as command-line messages.\n\n - FileType -- A factory for defining types of files to be created. As the\n example above shows, instances of FileType are typically passed as\n the type= argument of add_argument() calls.\n\n - Action -- The base class for parser actions. Typically actions are\n selected by passing strings like 'store_true' or 'append_const' to\n the action= argument of add_argument(). 
However, for greater\n customization of ArgumentParser actions, subclasses of Action may\n be defined and passed as the action= argument.\n\n - HelpFormatter, RawDescriptionHelpFormatter, RawTextHelpFormatter,\n ArgumentDefaultsHelpFormatter -- Formatter classes which\n may be passed as the formatter_class= argument to the\n ArgumentParser constructor. HelpFormatter is the default,\n RawDescriptionHelpFormatter and RawTextHelpFormatter tell the parser\n not to change the formatting for help text, and\n ArgumentDefaultsHelpFormatter adds information about argument defaults\n to the help.\n\nAll other classes in this module are considered implementation details.\n(Also note that HelpFormatter and RawDescriptionHelpFormatter are only\nconsidered public as object names -- the API of the formatter objects is\nstill considered an implementation detail.)\n\"\"\"\n\n__version__='1.1'\n__all__=[\n'ArgumentParser',\n'ArgumentError',\n'ArgumentTypeError',\n'FileType',\n'HelpFormatter',\n'ArgumentDefaultsHelpFormatter',\n'RawDescriptionHelpFormatter',\n'RawTextHelpFormatter',\n'MetavarTypeHelpFormatter',\n'Namespace',\n'Action',\n'ONE_OR_MORE',\n'OPTIONAL',\n'PARSER',\n'REMAINDER',\n'SUPPRESS',\n'ZERO_OR_MORE',\n]\n\n\nimport collections as _collections\nimport copy as _copy\nimport os as _os\nimport re as _re\nimport sys as _sys\nimport textwrap as _textwrap\n\nfrom gettext import gettext as _,ngettext\n\n\nSUPPRESS='==SUPPRESS=='\n\nOPTIONAL='?'\nZERO_OR_MORE='*'\nONE_OR_MORE='+'\nPARSER='A...'\nREMAINDER='...'\n_UNRECOGNIZED_ARGS_ATTR='_unrecognized_args'\n\n\n\n\n\nclass _AttributeHolder(object):\n ''\n\n\n\n\n\n \n \n def __repr__(self):\n type_name=type(self).__name__\n arg_strings=[]\n for arg in self._get_args():\n arg_strings.append(repr(arg))\n for name,value in self._get_kwargs():\n arg_strings.append('%s=%r'%(name,value))\n return'%s(%s)'%(type_name,', '.join(arg_strings))\n \n def _get_kwargs(self):\n return sorted(self.__dict__.items())\n \n def _get_args(self):\n return []\n \n \ndef _ensure_value(namespace,name,value):\n if getattr(namespace,name,None )is None :\n setattr(namespace,name,value)\n return getattr(namespace,name)\n \n \n \n \n \n \nclass HelpFormatter(object):\n ''\n\n\n\n \n \n def __init__(self,\n prog,\n indent_increment=2,\n max_help_position=24,\n width=None ):\n \n \n if width is None :\n try :\n width=int(_os.environ['COLUMNS'])\n except (KeyError,ValueError):\n width=80\n width -=2\n \n self._prog=prog\n self._indent_increment=indent_increment\n self._max_help_position=max_help_position\n self._max_help_position=min(max_help_position,\n max(width -20,indent_increment *2))\n self._width=width\n \n self._current_indent=0\n self._level=0\n self._action_max_length=0\n \n self._root_section=self._Section(self,None )\n self._current_section=self._root_section\n \n self._whitespace_matcher=_re.compile(r'\\s+')\n self._long_break_matcher=_re.compile(r'\\n\\n\\n+')\n \n \n \n \n def _indent(self):\n self._current_indent +=self._indent_increment\n self._level +=1\n \n def _dedent(self):\n self._current_indent -=self._indent_increment\n assert self._current_indent >=0,'Indent decreased below 0.'\n self._level -=1\n \n class _Section(object):\n \n def __init__(self,formatter,parent,heading=None ):\n self.formatter=formatter\n self.parent=parent\n self.heading=heading\n self.items=[]\n \n def format_help(self):\n \n if self.parent is not None :\n self.formatter._indent()\n join=self.formatter._join_parts\n for func,args in self.items:\n func(*args)\n 
item_help=join([func(*args)for func,args in self.items])\n if self.parent is not None :\n self.formatter._dedent()\n \n \n if not item_help:\n return''\n \n \n if self.heading is not SUPPRESS and self.heading is not None :\n current_indent=self.formatter._current_indent\n heading='%*s%s:\\n'%(current_indent,'',self.heading)\n else :\n heading=''\n \n \n return join(['\\n',heading,item_help,'\\n'])\n \n def _add_item(self,func,args):\n self._current_section.items.append((func,args))\n \n \n \n \n def start_section(self,heading):\n self._indent()\n section=self._Section(self,self._current_section,heading)\n self._add_item(section.format_help,[])\n self._current_section=section\n \n def end_section(self):\n self._current_section=self._current_section.parent\n self._dedent()\n \n def add_text(self,text):\n if text is not SUPPRESS and text is not None :\n self._add_item(self._format_text,[text])\n \n def add_usage(self,usage,actions,groups,prefix=None ):\n if usage is not SUPPRESS:\n args=usage,actions,groups,prefix\n self._add_item(self._format_usage,args)\n \n def add_argument(self,action):\n if action.help is not SUPPRESS:\n \n \n get_invocation=self._format_action_invocation\n invocations=[get_invocation(action)]\n for subaction in self._iter_indented_subactions(action):\n invocations.append(get_invocation(subaction))\n \n \n invocation_length=max([len(s)for s in invocations])\n action_length=invocation_length+self._current_indent\n self._action_max_length=max(self._action_max_length,\n action_length)\n \n \n self._add_item(self._format_action,[action])\n \n def add_arguments(self,actions):\n for action in actions:\n self.add_argument(action)\n \n \n \n \n def format_help(self):\n help=self._root_section.format_help()\n if help:\n help=self._long_break_matcher.sub('\\n\\n',help)\n help=help.strip('\\n')+'\\n'\n return help\n \n def _join_parts(self,part_strings):\n return''.join([part\n for part in part_strings\n if part and part is not SUPPRESS])\n \n def _format_usage(self,usage,actions,groups,prefix):\n if prefix is None :\n prefix=_('usage: ')\n \n \n if usage is not None :\n usage=usage %dict(prog=self._prog)\n \n \n elif usage is None and not actions:\n usage='%(prog)s'%dict(prog=self._prog)\n \n \n elif usage is None :\n prog='%(prog)s'%dict(prog=self._prog)\n \n \n optionals=[]\n positionals=[]\n for action in actions:\n if action.option_strings:\n optionals.append(action)\n else :\n positionals.append(action)\n \n \n format=self._format_actions_usage\n action_usage=format(optionals+positionals,groups)\n usage=' '.join([s for s in [prog,action_usage]if s])\n \n \n text_width=self._width -self._current_indent\n if len(prefix)+len(usage)>text_width:\n \n \n part_regexp=r'\\(.*?\\)+|\\[.*?\\]+|\\S+'\n opt_usage=format(optionals,groups)\n pos_usage=format(positionals,groups)\n opt_parts=_re.findall(part_regexp,opt_usage)\n pos_parts=_re.findall(part_regexp,pos_usage)\n assert' '.join(opt_parts)==opt_usage\n assert' '.join(pos_parts)==pos_usage\n \n \n def get_lines(parts,indent,prefix=None ):\n lines=[]\n line=[]\n if prefix is not None :\n line_len=len(prefix)-1\n else :\n line_len=len(indent)-1\n for part in parts:\n if line_len+1+len(part)>text_width and line:\n lines.append(indent+' '.join(line))\n line=[]\n line_len=len(indent)-1\n line.append(part)\n line_len +=len(part)+1\n if line:\n lines.append(indent+' '.join(line))\n if prefix is not None :\n lines[0]=lines[0][len(indent):]\n return lines\n \n \n if len(prefix)+len(prog)<=0.75 *text_width:\n indent=' 
'*(len(prefix)+len(prog)+1)\n if opt_parts:\n lines=get_lines([prog]+opt_parts,indent,prefix)\n lines.extend(get_lines(pos_parts,indent))\n elif pos_parts:\n lines=get_lines([prog]+pos_parts,indent,prefix)\n else :\n lines=[prog]\n \n \n else :\n indent=' '*len(prefix)\n parts=opt_parts+pos_parts\n lines=get_lines(parts,indent)\n if len(lines)>1:\n lines=[]\n lines.extend(get_lines(opt_parts,indent))\n lines.extend(get_lines(pos_parts,indent))\n lines=[prog]+lines\n \n \n usage='\\n'.join(lines)\n \n \n return'%s%s\\n\\n'%(prefix,usage)\n \n def _format_actions_usage(self,actions,groups):\n \n group_actions=set()\n inserts={}\n for group in groups:\n try :\n start=actions.index(group._group_actions[0])\n except ValueError:\n continue\n else :\n end=start+len(group._group_actions)\n if actions[start:end]==group._group_actions:\n for action in group._group_actions:\n group_actions.add(action)\n if not group.required:\n if start in inserts:\n inserts[start]+=' ['\n else :\n inserts[start]='['\n inserts[end]=']'\n else :\n if start in inserts:\n inserts[start]+=' ('\n else :\n inserts[start]='('\n inserts[end]=')'\n for i in range(start+1,end):\n inserts[i]='|'\n \n \n parts=[]\n for i,action in enumerate(actions):\n \n \n \n if action.help is SUPPRESS:\n parts.append(None )\n if inserts.get(i)=='|':\n inserts.pop(i)\n elif inserts.get(i+1)=='|':\n inserts.pop(i+1)\n \n \n elif not action.option_strings:\n default=self._get_default_metavar_for_positional(action)\n part=self._format_args(action,default)\n \n \n if action in group_actions:\n if part[0]=='['and part[-1]==']':\n part=part[1:-1]\n \n \n parts.append(part)\n \n \n else :\n option_string=action.option_strings[0]\n \n \n \n if action.nargs ==0:\n part='%s'%option_string\n \n \n \n else :\n default=self._get_default_metavar_for_optional(action)\n args_string=self._format_args(action,default)\n part='%s %s'%(option_string,args_string)\n \n \n if not action.required and action not in group_actions:\n part='[%s]'%part\n \n \n parts.append(part)\n \n \n for i in sorted(inserts,reverse=True ):\n parts[i:i]=[inserts[i]]\n \n \n text=' '.join([item for item in parts if item is not None ])\n \n \n open=r'[\\[(]'\n close=r'[\\])]'\n text=_re.sub(r'(%s) '%open,r'\\1',text)\n text=_re.sub(r' (%s)'%close,r'\\1',text)\n text=_re.sub(r'%s *%s'%(open,close),r'',text)\n text=_re.sub(r'\\(([^|]*)\\)',r'\\1',text)\n text=text.strip()\n \n \n return text\n \n def _format_text(self,text):\n if'%(prog)'in text:\n text=text %dict(prog=self._prog)\n text_width=max(self._width -self._current_indent,11)\n indent=' '*self._current_indent\n return self._fill_text(text,text_width,indent)+'\\n\\n'\n \n def _format_action(self,action):\n \n help_position=min(self._action_max_length+2,\n self._max_help_position)\n help_width=max(self._width -help_position,11)\n action_width=help_position -self._current_indent -2\n action_header=self._format_action_invocation(action)\n \n \n if not action.help:\n tup=self._current_indent,'',action_header\n action_header='%*s%s\\n'%tup\n \n \n elif len(action_header)<=action_width:\n tup=self._current_indent,'',action_width,action_header\n action_header='%*s%-*s '%tup\n indent_first=0\n \n \n else :\n tup=self._current_indent,'',action_header\n action_header='%*s%s\\n'%tup\n indent_first=help_position\n \n \n parts=[action_header]\n \n \n if action.help:\n help_text=self._expand_help(action)\n help_lines=self._split_lines(help_text,help_width)\n parts.append('%*s%s\\n'%(indent_first,'',help_lines[0]))\n for line in help_lines[1:]:\n 
parts.append('%*s%s\\n'%(help_position,'',line))\n \n \n elif not action_header.endswith('\\n'):\n parts.append('\\n')\n \n \n for subaction in self._iter_indented_subactions(action):\n parts.append(self._format_action(subaction))\n \n \n return self._join_parts(parts)\n \n def _format_action_invocation(self,action):\n if not action.option_strings:\n default=self._get_default_metavar_for_positional(action)\n metavar,=self._metavar_formatter(action,default)(1)\n return metavar\n \n else :\n parts=[]\n \n \n \n if action.nargs ==0:\n parts.extend(action.option_strings)\n \n \n \n else :\n default=self._get_default_metavar_for_optional(action)\n args_string=self._format_args(action,default)\n for option_string in action.option_strings:\n parts.append('%s %s'%(option_string,args_string))\n \n return', '.join(parts)\n \n def _metavar_formatter(self,action,default_metavar):\n if action.metavar is not None :\n result=action.metavar\n elif action.choices is not None :\n choice_strs=[str(choice)for choice in action.choices]\n result='{%s}'%','.join(choice_strs)\n else :\n result=default_metavar\n \n def format(tuple_size):\n if isinstance(result,tuple):\n return result\n else :\n return (result,)*tuple_size\n return format\n \n def _format_args(self,action,default_metavar):\n get_metavar=self._metavar_formatter(action,default_metavar)\n if action.nargs is None :\n result='%s'%get_metavar(1)\n elif action.nargs ==OPTIONAL:\n result='[%s]'%get_metavar(1)\n elif action.nargs ==ZERO_OR_MORE:\n result='[%s [%s ...]]'%get_metavar(2)\n elif action.nargs ==ONE_OR_MORE:\n result='%s [%s ...]'%get_metavar(2)\n elif action.nargs ==REMAINDER:\n result='...'\n elif action.nargs ==PARSER:\n result='%s ...'%get_metavar(1)\n else :\n formats=['%s'for _ in range(action.nargs)]\n result=' '.join(formats)%get_metavar(action.nargs)\n return result\n \n def _expand_help(self,action):\n params=dict(vars(action),prog=self._prog)\n for name in list(params):\n if params[name]is SUPPRESS:\n del params[name]\n for name in list(params):\n if hasattr(params[name],'__name__'):\n params[name]=params[name].__name__\n if params.get('choices')is not None :\n choices_str=', '.join([str(c)for c in params['choices']])\n params['choices']=choices_str\n return self._get_help_string(action)%params\n \n def _iter_indented_subactions(self,action):\n try :\n get_subactions=action._get_subactions\n except AttributeError:\n pass\n else :\n self._indent()\n yield from get_subactions()\n self._dedent()\n \n def _split_lines(self,text,width):\n text=self._whitespace_matcher.sub(' ',text).strip()\n return _textwrap.wrap(text,width)\n \n def _fill_text(self,text,width,indent):\n text=self._whitespace_matcher.sub(' ',text).strip()\n return _textwrap.fill(text,width,initial_indent=indent,\n subsequent_indent=indent)\n \n def _get_help_string(self,action):\n return action.help\n \n def _get_default_metavar_for_optional(self,action):\n return action.dest.upper()\n \n def _get_default_metavar_for_positional(self,action):\n return action.dest\n \n \nclass RawDescriptionHelpFormatter(HelpFormatter):\n ''\n\n\n\n \n \n def _fill_text(self,text,width,indent):\n return''.join(indent+line for line in text.splitlines(keepends=True ))\n \n \nclass RawTextHelpFormatter(RawDescriptionHelpFormatter):\n ''\n\n\n\n \n \n def _split_lines(self,text,width):\n return text.splitlines()\n \n \nclass ArgumentDefaultsHelpFormatter(HelpFormatter):\n ''\n\n\n\n \n \n def _get_help_string(self,action):\n help=action.help\n if'%(default)'not in action.help:\n if 
action.default is not SUPPRESS:\n defaulting_nargs=[OPTIONAL,ZERO_OR_MORE]\n if action.option_strings or action.nargs in defaulting_nargs:\n help +=' (default: %(default)s)'\n return help\n \n \nclass MetavarTypeHelpFormatter(HelpFormatter):\n ''\n\n\n\n\n \n \n def _get_default_metavar_for_optional(self,action):\n return action.type.__name__\n \n def _get_default_metavar_for_positional(self,action):\n return action.type.__name__\n \n \n \n \n \n \n \ndef _get_action_name(argument):\n if argument is None :\n return None\n elif argument.option_strings:\n return'/'.join(argument.option_strings)\n elif argument.metavar not in (None ,SUPPRESS):\n return argument.metavar\n elif argument.dest not in (None ,SUPPRESS):\n return argument.dest\n else :\n return None\n \n \nclass ArgumentError(Exception):\n ''\n\n\n\n \n \n def __init__(self,argument,message):\n self.argument_name=_get_action_name(argument)\n self.message=message\n \n def __str__(self):\n if self.argument_name is None :\n format='%(message)s'\n else :\n format='argument %(argument_name)s: %(message)s'\n return format %dict(message=self.message,\n argument_name=self.argument_name)\n \n \nclass ArgumentTypeError(Exception):\n ''\n pass\n \n \n \n \n \n \nclass Action(_AttributeHolder):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n def __init__(self,\n option_strings,\n dest,\n nargs=None ,\n const=None ,\n default=None ,\n type=None ,\n choices=None ,\n required=False ,\n help=None ,\n metavar=None ):\n self.option_strings=option_strings\n self.dest=dest\n self.nargs=nargs\n self.const=const\n self.default=default\n self.type=type\n self.choices=choices\n self.required=required\n self.help=help\n self.metavar=metavar\n \n def _get_kwargs(self):\n names=[\n 'option_strings',\n 'dest',\n 'nargs',\n 'const',\n 'default',\n 'type',\n 'choices',\n 'help',\n 'metavar',\n ]\n return [(name,getattr(self,name))for name in names]\n \n def __call__(self,parser,namespace,values,option_string=None ):\n raise NotImplementedError(_('.__call__() not defined'))\n \n \nclass _StoreAction(Action):\n\n def __init__(self,\n option_strings,\n dest,\n nargs=None ,\n const=None ,\n default=None ,\n type=None ,\n choices=None ,\n required=False ,\n help=None ,\n metavar=None ):\n if nargs ==0:\n raise ValueError('nargs for store actions must be > 0; if you '\n 'have nothing to store, actions such as store '\n 'true or store const may be more appropriate')\n if const is not None and nargs !=OPTIONAL:\n raise ValueError('nargs must be %r to supply const'%OPTIONAL)\n super(_StoreAction,self).__init__(\n option_strings=option_strings,\n dest=dest,\n nargs=nargs,\n const=const,\n default=default,\n type=type,\n choices=choices,\n required=required,\n help=help,\n metavar=metavar)\n \n def __call__(self,parser,namespace,values,option_string=None ):\n setattr(namespace,self.dest,values)\n \n \nclass _StoreConstAction(Action):\n\n def __init__(self,\n option_strings,\n dest,\n const,\n default=None ,\n required=False ,\n help=None ,\n metavar=None ):\n super(_StoreConstAction,self).__init__(\n option_strings=option_strings,\n dest=dest,\n nargs=0,\n const=const,\n default=default,\n required=required,\n help=help)\n \n def __call__(self,parser,namespace,values,option_string=None ):\n setattr(namespace,self.dest,self.const)\n \n \nclass _StoreTrueAction(_StoreConstAction):\n\n def __init__(self,\n option_strings,\n dest,\n default=False ,\n required=False ,\n help=None ):\n 
super(_StoreTrueAction,self).__init__(\n option_strings=option_strings,\n dest=dest,\n const=True ,\n default=default,\n required=required,\n help=help)\n \n \nclass _StoreFalseAction(_StoreConstAction):\n\n def __init__(self,\n option_strings,\n dest,\n default=True ,\n required=False ,\n help=None ):\n super(_StoreFalseAction,self).__init__(\n option_strings=option_strings,\n dest=dest,\n const=False ,\n default=default,\n required=required,\n help=help)\n \n \nclass _AppendAction(Action):\n\n def __init__(self,\n option_strings,\n dest,\n nargs=None ,\n const=None ,\n default=None ,\n type=None ,\n choices=None ,\n required=False ,\n help=None ,\n metavar=None ):\n if nargs ==0:\n raise ValueError('nargs for append actions must be > 0; if arg '\n 'strings are not supplying the value to append, '\n 'the append const action may be more appropriate')\n if const is not None and nargs !=OPTIONAL:\n raise ValueError('nargs must be %r to supply const'%OPTIONAL)\n super(_AppendAction,self).__init__(\n option_strings=option_strings,\n dest=dest,\n nargs=nargs,\n const=const,\n default=default,\n type=type,\n choices=choices,\n required=required,\n help=help,\n metavar=metavar)\n \n def __call__(self,parser,namespace,values,option_string=None ):\n items=_copy.copy(_ensure_value(namespace,self.dest,[]))\n items.append(values)\n setattr(namespace,self.dest,items)\n \n \nclass _AppendConstAction(Action):\n\n def __init__(self,\n option_strings,\n dest,\n const,\n default=None ,\n required=False ,\n help=None ,\n metavar=None ):\n super(_AppendConstAction,self).__init__(\n option_strings=option_strings,\n dest=dest,\n nargs=0,\n const=const,\n default=default,\n required=required,\n help=help,\n metavar=metavar)\n \n def __call__(self,parser,namespace,values,option_string=None ):\n items=_copy.copy(_ensure_value(namespace,self.dest,[]))\n items.append(self.const)\n setattr(namespace,self.dest,items)\n \n \nclass _CountAction(Action):\n\n def __init__(self,\n option_strings,\n dest,\n default=None ,\n required=False ,\n help=None ):\n super(_CountAction,self).__init__(\n option_strings=option_strings,\n dest=dest,\n nargs=0,\n default=default,\n required=required,\n help=help)\n \n def __call__(self,parser,namespace,values,option_string=None ):\n new_count=_ensure_value(namespace,self.dest,0)+1\n setattr(namespace,self.dest,new_count)\n \n \nclass _HelpAction(Action):\n\n def __init__(self,\n option_strings,\n dest=SUPPRESS,\n default=SUPPRESS,\n help=None ):\n super(_HelpAction,self).__init__(\n option_strings=option_strings,\n dest=dest,\n default=default,\n nargs=0,\n help=help)\n \n def __call__(self,parser,namespace,values,option_string=None ):\n parser.print_help()\n parser.exit()\n \n \nclass _VersionAction(Action):\n\n def __init__(self,\n option_strings,\n version=None ,\n dest=SUPPRESS,\n default=SUPPRESS,\n help=\"show program's version number and exit\"):\n super(_VersionAction,self).__init__(\n option_strings=option_strings,\n dest=dest,\n default=default,\n nargs=0,\n help=help)\n self.version=version\n \n def __call__(self,parser,namespace,values,option_string=None ):\n version=self.version\n if version is None :\n version=parser.version\n formatter=parser._get_formatter()\n formatter.add_text(version)\n parser._print_message(formatter.format_help(),_sys.stdout)\n parser.exit()\n \n \nclass _SubParsersAction(Action):\n\n class _ChoicesPseudoAction(Action):\n \n def __init__(self,name,aliases,help):\n metavar=dest=name\n if aliases:\n metavar +=' (%s)'%', '.join(aliases)\n 
sup=super(_SubParsersAction._ChoicesPseudoAction,self)\n sup.__init__(option_strings=[],dest=dest,help=help,\n metavar=metavar)\n \n def __init__(self,\n option_strings,\n prog,\n parser_class,\n dest=SUPPRESS,\n help=None ,\n metavar=None ):\n \n self._prog_prefix=prog\n self._parser_class=parser_class\n self._name_parser_map=_collections.OrderedDict()\n self._choices_actions=[]\n \n super(_SubParsersAction,self).__init__(\n option_strings=option_strings,\n dest=dest,\n nargs=PARSER,\n choices=self._name_parser_map,\n help=help,\n metavar=metavar)\n \n def add_parser(self,name,**kwargs):\n \n if kwargs.get('prog')is None :\n kwargs['prog']='%s %s'%(self._prog_prefix,name)\n \n aliases=kwargs.pop('aliases',())\n \n \n if'help'in kwargs:\n help=kwargs.pop('help')\n choice_action=self._ChoicesPseudoAction(name,aliases,help)\n self._choices_actions.append(choice_action)\n \n \n parser=self._parser_class(**kwargs)\n self._name_parser_map[name]=parser\n \n \n for alias in aliases:\n self._name_parser_map[alias]=parser\n \n return parser\n \n def _get_subactions(self):\n return self._choices_actions\n \n def __call__(self,parser,namespace,values,option_string=None ):\n parser_name=values[0]\n arg_strings=values[1:]\n \n \n if self.dest is not SUPPRESS:\n setattr(namespace,self.dest,parser_name)\n \n \n try :\n parser=self._name_parser_map[parser_name]\n except KeyError:\n args={'parser_name':parser_name,\n 'choices':', '.join(self._name_parser_map)}\n msg=_('unknown parser %(parser_name)r (choices: %(choices)s)')%args\n raise ArgumentError(self,msg)\n \n \n \n \n \n \n \n \n subnamespace,arg_strings=parser.parse_known_args(arg_strings,None )\n for key,value in vars(subnamespace).items():\n setattr(namespace,key,value)\n \n if arg_strings:\n vars(namespace).setdefault(_UNRECOGNIZED_ARGS_ATTR,[])\n getattr(namespace,_UNRECOGNIZED_ARGS_ATTR).extend(arg_strings)\n \n \n \n \n \n \nclass FileType(object):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n def __init__(self,mode='r',bufsize=-1,encoding=None ,errors=None ):\n self._mode=mode\n self._bufsize=bufsize\n self._encoding=encoding\n self._errors=errors\n \n def __call__(self,string):\n \n if string =='-':\n if'r'in self._mode:\n return _sys.stdin\n elif'w'in self._mode:\n return _sys.stdout\n else :\n msg=_('argument \"-\" with mode %r')%self._mode\n raise ValueError(msg)\n \n \n try :\n return open(string,self._mode,self._bufsize,self._encoding,\n self._errors)\n except OSError as e:\n message=_(\"can't open '%s': %s\")\n raise ArgumentTypeError(message %(string,e))\n \n def __repr__(self):\n args=self._mode,self._bufsize\n kwargs=[('encoding',self._encoding),('errors',self._errors)]\n args_str=', '.join([repr(arg)for arg in args if arg !=-1]+\n ['%s=%r'%(kw,arg)for kw,arg in kwargs\n if arg is not None ])\n return'%s(%s)'%(type(self).__name__,args_str)\n \n \n \n \n \nclass Namespace(_AttributeHolder):\n ''\n\n\n\n \n \n def __init__(self,**kwargs):\n for name in kwargs:\n setattr(self,name,kwargs[name])\n \n def __eq__(self,other):\n if not isinstance(other,Namespace):\n return NotImplemented\n return vars(self)==vars(other)\n \n def __ne__(self,other):\n if not isinstance(other,Namespace):\n return NotImplemented\n return not (self ==other)\n \n def __contains__(self,key):\n return key in self.__dict__\n \n \nclass _ActionsContainer(object):\n\n def __init__(self,\n description,\n prefix_chars,\n argument_default,\n conflict_handler):\n super(_ActionsContainer,self).__init__()\n \n self.description=description\n 
self.argument_default=argument_default\n self.prefix_chars=prefix_chars\n self.conflict_handler=conflict_handler\n \n \n self._registries={}\n \n \n self.register('action',None ,_StoreAction)\n self.register('action','store',_StoreAction)\n self.register('action','store_const',_StoreConstAction)\n self.register('action','store_true',_StoreTrueAction)\n self.register('action','store_false',_StoreFalseAction)\n self.register('action','append',_AppendAction)\n self.register('action','append_const',_AppendConstAction)\n self.register('action','count',_CountAction)\n self.register('action','help',_HelpAction)\n self.register('action','version',_VersionAction)\n self.register('action','parsers',_SubParsersAction)\n \n \n self._get_handler()\n \n \n self._actions=[]\n self._option_string_actions={}\n \n \n self._action_groups=[]\n self._mutually_exclusive_groups=[]\n \n \n self._defaults={}\n \n \n self._negative_number_matcher=_re.compile(r'^-\\d+$|^-\\d*\\.\\d+$')\n \n \n \n self._has_negative_number_optionals=[]\n \n \n \n \n def register(self,registry_name,value,object):\n registry=self._registries.setdefault(registry_name,{})\n registry[value]=object\n \n def _registry_get(self,registry_name,value,default=None ):\n return self._registries[registry_name].get(value,default)\n \n \n \n \n def set_defaults(self,**kwargs):\n self._defaults.update(kwargs)\n \n \n \n for action in self._actions:\n if action.dest in kwargs:\n action.default=kwargs[action.dest]\n \n def get_default(self,dest):\n for action in self._actions:\n if action.dest ==dest and action.default is not None :\n return action.default\n return self._defaults.get(dest,None )\n \n \n \n \n \n def add_argument(self,*args,**kwargs):\n ''\n\n\n \n \n \n \n \n chars=self.prefix_chars\n if not args or len(args)==1 and args[0][0]not in chars:\n if args and'dest'in kwargs:\n raise ValueError('dest supplied twice for positional argument')\n kwargs=self._get_positional_kwargs(*args,**kwargs)\n \n \n else :\n kwargs=self._get_optional_kwargs(*args,**kwargs)\n \n \n if'default'not in kwargs:\n dest=kwargs['dest']\n if dest in self._defaults:\n kwargs['default']=self._defaults[dest]\n elif self.argument_default is not None :\n kwargs['default']=self.argument_default\n \n \n action_class=self._pop_action_class(kwargs)\n if not callable(action_class):\n raise ValueError('unknown action \"%s\"'%(action_class,))\n action=action_class(**kwargs)\n \n \n type_func=self._registry_get('type',action.type,action.type)\n if not callable(type_func):\n raise ValueError('%r is not callable'%(type_func,))\n \n \n if hasattr(self,\"_get_formatter\"):\n try :\n self._get_formatter()._format_args(action,None )\n except TypeError:\n raise ValueError(\"length of metavar tuple does not match nargs\")\n \n return self._add_action(action)\n \n def add_argument_group(self,*args,**kwargs):\n group=_ArgumentGroup(self,*args,**kwargs)\n self._action_groups.append(group)\n return group\n \n def add_mutually_exclusive_group(self,**kwargs):\n group=_MutuallyExclusiveGroup(self,**kwargs)\n self._mutually_exclusive_groups.append(group)\n return group\n \n def _add_action(self,action):\n \n self._check_conflict(action)\n \n \n self._actions.append(action)\n action.container=self\n \n \n for option_string in action.option_strings:\n self._option_string_actions[option_string]=action\n \n \n for option_string in action.option_strings:\n if self._negative_number_matcher.match(option_string):\n if not self._has_negative_number_optionals:\n 
self._has_negative_number_optionals.append(True )\n \n \n return action\n \n def _remove_action(self,action):\n self._actions.remove(action)\n \n def _add_container_actions(self,container):\n \n title_group_map={}\n for group in self._action_groups:\n if group.title in title_group_map:\n msg=_('cannot merge actions - two groups are named %r')\n raise ValueError(msg %(group.title))\n title_group_map[group.title]=group\n \n \n group_map={}\n for group in container._action_groups:\n \n \n \n if group.title not in title_group_map:\n title_group_map[group.title]=self.add_argument_group(\n title=group.title,\n description=group.description,\n conflict_handler=group.conflict_handler)\n \n \n for action in group._group_actions:\n group_map[action]=title_group_map[group.title]\n \n \n \n \n for group in container._mutually_exclusive_groups:\n mutex_group=self.add_mutually_exclusive_group(\n required=group.required)\n \n \n for action in group._group_actions:\n group_map[action]=mutex_group\n \n \n for action in container._actions:\n group_map.get(action,self)._add_action(action)\n \n def _get_positional_kwargs(self,dest,**kwargs):\n \n if'required'in kwargs:\n msg=_(\"'required' is an invalid argument for positionals\")\n raise TypeError(msg)\n \n \n \n if kwargs.get('nargs')not in [OPTIONAL,ZERO_OR_MORE]:\n kwargs['required']=True\n if kwargs.get('nargs')==ZERO_OR_MORE and'default'not in kwargs:\n kwargs['required']=True\n \n \n return dict(kwargs,dest=dest,option_strings=[])\n \n def _get_optional_kwargs(self,*args,**kwargs):\n \n option_strings=[]\n long_option_strings=[]\n for option_string in args:\n \n if not option_string[0]in self.prefix_chars:\n args={'option':option_string,\n 'prefix_chars':self.prefix_chars}\n msg=_('invalid option string %(option)r: '\n 'must start with a character %(prefix_chars)r')\n raise ValueError(msg %args)\n \n \n option_strings.append(option_string)\n if option_string[0]in self.prefix_chars:\n if len(option_string)>1:\n if option_string[1]in self.prefix_chars:\n long_option_strings.append(option_string)\n \n \n dest=kwargs.pop('dest',None )\n if dest is None :\n if long_option_strings:\n dest_option_string=long_option_strings[0]\n else :\n dest_option_string=option_strings[0]\n dest=dest_option_string.lstrip(self.prefix_chars)\n if not dest:\n msg=_('dest= is required for options like %r')\n raise ValueError(msg %option_string)\n dest=dest.replace('-','_')\n \n \n return dict(kwargs,dest=dest,option_strings=option_strings)\n \n def _pop_action_class(self,kwargs,default=None ):\n action=kwargs.pop('action',default)\n return self._registry_get('action',action,action)\n \n def _get_handler(self):\n \n handler_func_name='_handle_conflict_%s'%self.conflict_handler\n try :\n return getattr(self,handler_func_name)\n except AttributeError:\n msg=_('invalid conflict_resolution value: %r')\n raise ValueError(msg %self.conflict_handler)\n \n def _check_conflict(self,action):\n \n \n confl_optionals=[]\n for option_string in action.option_strings:\n if option_string in self._option_string_actions:\n confl_optional=self._option_string_actions[option_string]\n confl_optionals.append((option_string,confl_optional))\n \n \n if confl_optionals:\n conflict_handler=self._get_handler()\n conflict_handler(action,confl_optionals)\n \n def _handle_conflict_error(self,action,conflicting_actions):\n message=ngettext('conflicting option string: %s',\n 'conflicting option strings: %s',\n len(conflicting_actions))\n conflict_string=', '.join([option_string\n for option_string,action\n in 
conflicting_actions])\n raise ArgumentError(action,message %conflict_string)\n \n def _handle_conflict_resolve(self,action,conflicting_actions):\n \n \n for option_string,action in conflicting_actions:\n \n \n action.option_strings.remove(option_string)\n self._option_string_actions.pop(option_string,None )\n \n \n \n if not action.option_strings:\n action.container._remove_action(action)\n \n \nclass _ArgumentGroup(_ActionsContainer):\n\n def __init__(self,container,title=None ,description=None ,**kwargs):\n \n update=kwargs.setdefault\n update('conflict_handler',container.conflict_handler)\n update('prefix_chars',container.prefix_chars)\n update('argument_default',container.argument_default)\n super_init=super(_ArgumentGroup,self).__init__\n super_init(description=description,**kwargs)\n \n \n self.title=title\n self._group_actions=[]\n \n \n self._registries=container._registries\n self._actions=container._actions\n self._option_string_actions=container._option_string_actions\n self._defaults=container._defaults\n self._has_negative_number_optionals= container._has_negative_number_optionals\n self._mutually_exclusive_groups=container._mutually_exclusive_groups\n \n def _add_action(self,action):\n action=super(_ArgumentGroup,self)._add_action(action)\n self._group_actions.append(action)\n return action\n \n def _remove_action(self,action):\n super(_ArgumentGroup,self)._remove_action(action)\n self._group_actions.remove(action)\n \n \nclass _MutuallyExclusiveGroup(_ArgumentGroup):\n\n def __init__(self,container,required=False ):\n super(_MutuallyExclusiveGroup,self).__init__(container)\n self.required=required\n self._container=container\n \n def _add_action(self,action):\n if action.required:\n msg=_('mutually exclusive arguments must be optional')\n raise ValueError(msg)\n action=self._container._add_action(action)\n self._group_actions.append(action)\n return action\n \n def _remove_action(self,action):\n self._container._remove_action(action)\n self._group_actions.remove(action)\n \n \nclass ArgumentParser(_AttributeHolder,_ActionsContainer):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n def __init__(self,\n prog=None ,\n usage=None ,\n description=None ,\n epilog=None ,\n parents=[],\n formatter_class=HelpFormatter,\n prefix_chars='-',\n fromfile_prefix_chars=None ,\n argument_default=None ,\n conflict_handler='error',\n add_help=True ):\n \n superinit=super(ArgumentParser,self).__init__\n superinit(description=description,\n prefix_chars=prefix_chars,\n argument_default=argument_default,\n conflict_handler=conflict_handler)\n \n \n if prog is None :\n prog=_os.path.basename(_sys.argv[0])\n \n self.prog=prog\n self.usage=usage\n self.epilog=epilog\n self.formatter_class=formatter_class\n self.fromfile_prefix_chars=fromfile_prefix_chars\n self.add_help=add_help\n \n add_group=self.add_argument_group\n self._positionals=add_group(_('positional arguments'))\n self._optionals=add_group(_('optional arguments'))\n self._subparsers=None\n \n \n def identity(string):\n return string\n self.register('type',None ,identity)\n \n \n \n default_prefix='-'if'-'in prefix_chars else prefix_chars[0]\n if self.add_help:\n self.add_argument(\n default_prefix+'h',default_prefix *2+'help',\n action='help',default=SUPPRESS,\n help=_('show this help message and exit'))\n \n \n for parent in parents:\n self._add_container_actions(parent)\n try :\n defaults=parent._defaults\n except AttributeError:\n pass\n else :\n self._defaults.update(defaults)\n \n \n \n \n def _get_kwargs(self):\n names=[\n 'prog',\n 
'usage',\n 'description',\n 'formatter_class',\n 'conflict_handler',\n 'add_help',\n ]\n return [(name,getattr(self,name))for name in names]\n \n \n \n \n def add_subparsers(self,**kwargs):\n if self._subparsers is not None :\n self.error(_('cannot have multiple subparser arguments'))\n \n \n kwargs.setdefault('parser_class',type(self))\n \n if'title'in kwargs or'description'in kwargs:\n title=_(kwargs.pop('title','subcommands'))\n description=_(kwargs.pop('description',None ))\n self._subparsers=self.add_argument_group(title,description)\n else :\n self._subparsers=self._positionals\n \n \n \n if kwargs.get('prog')is None :\n formatter=self._get_formatter()\n positionals=self._get_positional_actions()\n groups=self._mutually_exclusive_groups\n formatter.add_usage(self.usage,positionals,groups,'')\n kwargs['prog']=formatter.format_help().strip()\n \n \n parsers_class=self._pop_action_class(kwargs,'parsers')\n action=parsers_class(option_strings=[],**kwargs)\n self._subparsers._add_action(action)\n \n \n return action\n \n def _add_action(self,action):\n if action.option_strings:\n self._optionals._add_action(action)\n else :\n self._positionals._add_action(action)\n return action\n \n def _get_optional_actions(self):\n return [action\n for action in self._actions\n if action.option_strings]\n \n def _get_positional_actions(self):\n return [action\n for action in self._actions\n if not action.option_strings]\n \n \n \n \n def parse_args(self,args=None ,namespace=None ):\n args,argv=self.parse_known_args(args,namespace)\n if argv:\n msg=_('unrecognized arguments: %s')\n self.error(msg %' '.join(argv))\n return args\n \n def parse_known_args(self,args=None ,namespace=None ):\n if args is None :\n \n args=_sys.argv[1:]\n else :\n \n args=list(args)\n \n \n if namespace is None :\n namespace=Namespace()\n \n \n for action in self._actions:\n if action.dest is not SUPPRESS:\n if not hasattr(namespace,action.dest):\n if action.default is not SUPPRESS:\n setattr(namespace,action.dest,action.default)\n \n \n for dest in self._defaults:\n if not hasattr(namespace,dest):\n setattr(namespace,dest,self._defaults[dest])\n \n \n try :\n namespace,args=self._parse_known_args(args,namespace)\n if hasattr(namespace,_UNRECOGNIZED_ARGS_ATTR):\n args.extend(getattr(namespace,_UNRECOGNIZED_ARGS_ATTR))\n delattr(namespace,_UNRECOGNIZED_ARGS_ATTR)\n return namespace,args\n except ArgumentError:\n err=_sys.exc_info()[1]\n self.error(str(err))\n \n def _parse_known_args(self,arg_strings,namespace):\n \n if self.fromfile_prefix_chars is not None :\n arg_strings=self._read_args_from_files(arg_strings)\n \n \n \n action_conflicts={}\n for mutex_group in self._mutually_exclusive_groups:\n group_actions=mutex_group._group_actions\n for i,mutex_action in enumerate(mutex_group._group_actions):\n conflicts=action_conflicts.setdefault(mutex_action,[])\n conflicts.extend(group_actions[:i])\n conflicts.extend(group_actions[i+1:])\n \n \n \n \n option_string_indices={}\n arg_string_pattern_parts=[]\n arg_strings_iter=iter(arg_strings)\n for i,arg_string in enumerate(arg_strings_iter):\n \n \n if arg_string =='--':\n arg_string_pattern_parts.append('-')\n for arg_string in arg_strings_iter:\n arg_string_pattern_parts.append('A')\n \n \n \n else :\n option_tuple=self._parse_optional(arg_string)\n if option_tuple is None :\n pattern='A'\n else :\n option_string_indices[i]=option_tuple\n pattern='O'\n arg_string_pattern_parts.append(pattern)\n \n \n arg_strings_pattern=''.join(arg_string_pattern_parts)\n \n \n seen_actions=set()\n 
seen_non_default_actions=set()\n \n def take_action(action,argument_strings,option_string=None ):\n seen_actions.add(action)\n argument_values=self._get_values(action,argument_strings)\n \n \n \n \n if argument_values is not action.default:\n seen_non_default_actions.add(action)\n for conflict_action in action_conflicts.get(action,[]):\n if conflict_action in seen_non_default_actions:\n msg=_('not allowed with argument %s')\n action_name=_get_action_name(conflict_action)\n raise ArgumentError(action,msg %action_name)\n \n \n \n if argument_values is not SUPPRESS:\n action(self,namespace,argument_values,option_string)\n \n \n def consume_optional(start_index):\n \n \n option_tuple=option_string_indices[start_index]\n action,option_string,explicit_arg=option_tuple\n \n \n \n match_argument=self._match_argument\n action_tuples=[]\n while True :\n \n \n if action is None :\n extras.append(arg_strings[start_index])\n return start_index+1\n \n \n \n if explicit_arg is not None :\n arg_count=match_argument(action,'A')\n \n \n \n \n chars=self.prefix_chars\n if arg_count ==0 and option_string[1]not in chars:\n action_tuples.append((action,[],option_string))\n char=option_string[0]\n option_string=char+explicit_arg[0]\n new_explicit_arg=explicit_arg[1:]or None\n optionals_map=self._option_string_actions\n if option_string in optionals_map:\n action=optionals_map[option_string]\n explicit_arg=new_explicit_arg\n else :\n msg=_('ignored explicit argument %r')\n raise ArgumentError(action,msg %explicit_arg)\n \n \n \n elif arg_count ==1:\n stop=start_index+1\n args=[explicit_arg]\n action_tuples.append((action,args,option_string))\n break\n \n \n \n else :\n msg=_('ignored explicit argument %r')\n raise ArgumentError(action,msg %explicit_arg)\n \n \n \n \n else :\n start=start_index+1\n selected_patterns=arg_strings_pattern[start:]\n arg_count=match_argument(action,selected_patterns)\n stop=start+arg_count\n args=arg_strings[start:stop]\n action_tuples.append((action,args,option_string))\n break\n \n \n \n assert action_tuples\n for action,args,option_string in action_tuples:\n take_action(action,args,option_string)\n return stop\n \n \n \n positionals=self._get_positional_actions()\n \n \n def consume_positionals(start_index):\n \n match_partial=self._match_arguments_partial\n selected_pattern=arg_strings_pattern[start_index:]\n arg_counts=match_partial(positionals,selected_pattern)\n \n \n \n for action,arg_count in zip(positionals,arg_counts):\n args=arg_strings[start_index:start_index+arg_count]\n start_index +=arg_count\n take_action(action,args)\n \n \n \n positionals[:]=positionals[len(arg_counts):]\n return start_index\n \n \n \n extras=[]\n start_index=0\n if option_string_indices:\n max_option_string_index=max(option_string_indices)\n else :\n max_option_string_index=-1\n while start_index <=max_option_string_index:\n \n \n next_option_string_index=min([\n index\n for index in option_string_indices\n if index >=start_index])\n if start_index !=next_option_string_index:\n positionals_end_index=consume_positionals(start_index)\n \n \n \n if positionals_end_index >start_index:\n start_index=positionals_end_index\n continue\n else :\n start_index=positionals_end_index\n \n \n \n if start_index not in option_string_indices:\n strings=arg_strings[start_index:next_option_string_index]\n extras.extend(strings)\n start_index=next_option_string_index\n \n \n start_index=consume_optional(start_index)\n \n \n stop_index=consume_positionals(start_index)\n \n \n extras.extend(arg_strings[stop_index:])\n \n 
\n \n required_actions=[]\n for action in self._actions:\n if action not in seen_actions:\n if action.required:\n required_actions.append(_get_action_name(action))\n else :\n \n \n \n \n if (action.default is not None and\n isinstance(action.default,str)and\n hasattr(namespace,action.dest)and\n action.default is getattr(namespace,action.dest)):\n setattr(namespace,action.dest,\n self._get_value(action,action.default))\n \n if required_actions:\n self.error(_('the following arguments are required: %s')%\n ', '.join(required_actions))\n \n \n for group in self._mutually_exclusive_groups:\n if group.required:\n for action in group._group_actions:\n if action in seen_non_default_actions:\n break\n \n \n else :\n names=[_get_action_name(action)\n for action in group._group_actions\n if action.help is not SUPPRESS]\n msg=_('one of the arguments %s is required')\n self.error(msg %' '.join(names))\n \n \n return namespace,extras\n \n def _read_args_from_files(self,arg_strings):\n \n new_arg_strings=[]\n for arg_string in arg_strings:\n \n \n if not arg_string or arg_string[0]not in self.fromfile_prefix_chars:\n new_arg_strings.append(arg_string)\n \n \n else :\n try :\n with open(arg_string[1:])as args_file:\n arg_strings=[]\n for arg_line in args_file.read().splitlines():\n for arg in self.convert_arg_line_to_args(arg_line):\n arg_strings.append(arg)\n arg_strings=self._read_args_from_files(arg_strings)\n new_arg_strings.extend(arg_strings)\n except OSError:\n err=_sys.exc_info()[1]\n self.error(str(err))\n \n \n return new_arg_strings\n \n def convert_arg_line_to_args(self,arg_line):\n return [arg_line]\n \n def _match_argument(self,action,arg_strings_pattern):\n \n nargs_pattern=self._get_nargs_pattern(action)\n match=_re.match(nargs_pattern,arg_strings_pattern)\n \n \n if match is None :\n nargs_errors={\n None :_('expected one argument'),\n OPTIONAL:_('expected at most one argument'),\n ONE_OR_MORE:_('expected at least one argument'),\n }\n default=ngettext('expected %s argument',\n 'expected %s arguments',\n action.nargs)%action.nargs\n msg=nargs_errors.get(action.nargs,default)\n raise ArgumentError(action,msg)\n \n \n return len(match.group(1))\n \n def _match_arguments_partial(self,actions,arg_strings_pattern):\n \n \n result=[]\n for i in range(len(actions),0,-1):\n actions_slice=actions[:i]\n pattern=''.join([self._get_nargs_pattern(action)\n for action in actions_slice])\n match=_re.match(pattern,arg_strings_pattern)\n if match is not None :\n result.extend([len(string)for string in match.groups()])\n break\n \n \n return result\n \n def _parse_optional(self,arg_string):\n \n if not arg_string:\n return None\n \n \n if not arg_string[0]in self.prefix_chars:\n return None\n \n \n if arg_string in self._option_string_actions:\n action=self._option_string_actions[arg_string]\n return action,arg_string,None\n \n \n if len(arg_string)==1:\n return None\n \n \n if'='in arg_string:\n option_string,explicit_arg=arg_string.split('=',1)\n if option_string in self._option_string_actions:\n action=self._option_string_actions[option_string]\n return action,option_string,explicit_arg\n \n \n \n option_tuples=self._get_option_tuples(arg_string)\n \n \n if len(option_tuples)>1:\n options=', '.join([option_string\n for action,option_string,explicit_arg in option_tuples])\n args={'option':arg_string,'matches':options}\n msg=_('ambiguous option: %(option)s could match %(matches)s')\n self.error(msg %args)\n \n \n \n elif len(option_tuples)==1:\n option_tuple,=option_tuples\n return option_tuple\n \n \n 
\n \n if self._negative_number_matcher.match(arg_string):\n if not self._has_negative_number_optionals:\n return None\n \n \n if' 'in arg_string:\n return None\n \n \n \n return None ,arg_string,None\n \n def _get_option_tuples(self,option_string):\n result=[]\n \n \n \n chars=self.prefix_chars\n if option_string[0]in chars and option_string[1]in chars:\n if'='in option_string:\n option_prefix,explicit_arg=option_string.split('=',1)\n else :\n option_prefix=option_string\n explicit_arg=None\n for option_string in self._option_string_actions:\n if option_string.startswith(option_prefix):\n action=self._option_string_actions[option_string]\n tup=action,option_string,explicit_arg\n result.append(tup)\n \n \n \n \n elif option_string[0]in chars and option_string[1]not in chars:\n option_prefix=option_string\n explicit_arg=None\n short_option_prefix=option_string[:2]\n short_explicit_arg=option_string[2:]\n \n for option_string in self._option_string_actions:\n if option_string ==short_option_prefix:\n action=self._option_string_actions[option_string]\n tup=action,option_string,short_explicit_arg\n result.append(tup)\n elif option_string.startswith(option_prefix):\n action=self._option_string_actions[option_string]\n tup=action,option_string,explicit_arg\n result.append(tup)\n \n \n else :\n self.error(_('unexpected option string: %s')%option_string)\n \n \n return result\n \n def _get_nargs_pattern(self,action):\n \n \n nargs=action.nargs\n \n \n if nargs is None :\n nargs_pattern='(-*A-*)'\n \n \n elif nargs ==OPTIONAL:\n nargs_pattern='(-*A?-*)'\n \n \n elif nargs ==ZERO_OR_MORE:\n nargs_pattern='(-*[A-]*)'\n \n \n elif nargs ==ONE_OR_MORE:\n nargs_pattern='(-*A[A-]*)'\n \n \n elif nargs ==REMAINDER:\n nargs_pattern='([-AO]*)'\n \n \n elif nargs ==PARSER:\n nargs_pattern='(-*A[-AO]*)'\n \n \n else :\n nargs_pattern='(-*%s-*)'%'-*'.join('A'*nargs)\n \n \n if action.option_strings:\n nargs_pattern=nargs_pattern.replace('-*','')\n nargs_pattern=nargs_pattern.replace('-','')\n \n \n return nargs_pattern\n \n \n \n \n def _get_values(self,action,arg_strings):\n \n if action.nargs not in [PARSER,REMAINDER]:\n try :\n arg_strings.remove('--')\n except ValueError:\n pass\n \n \n if not arg_strings and action.nargs ==OPTIONAL:\n if action.option_strings:\n value=action.const\n else :\n value=action.default\n if isinstance(value,str):\n value=self._get_value(action,value)\n self._check_value(action,value)\n \n \n \n elif (not arg_strings and action.nargs ==ZERO_OR_MORE and\n not action.option_strings):\n if action.default is not None :\n value=action.default\n else :\n value=arg_strings\n self._check_value(action,value)\n \n \n elif len(arg_strings)==1 and action.nargs in [None ,OPTIONAL]:\n arg_string,=arg_strings\n value=self._get_value(action,arg_string)\n self._check_value(action,value)\n \n \n elif action.nargs ==REMAINDER:\n value=[self._get_value(action,v)for v in arg_strings]\n \n \n elif action.nargs ==PARSER:\n value=[self._get_value(action,v)for v in arg_strings]\n self._check_value(action,value[0])\n \n \n else :\n value=[self._get_value(action,v)for v in arg_strings]\n for v in value:\n self._check_value(action,v)\n \n \n return value\n \n def _get_value(self,action,arg_string):\n type_func=self._registry_get('type',action.type,action.type)\n if not callable(type_func):\n msg=_('%r is not callable')\n raise ArgumentError(action,msg %type_func)\n \n \n try :\n result=type_func(arg_string)\n \n \n except ArgumentTypeError:\n name=getattr(action.type,'__name__',repr(action.type))\n 
msg=str(_sys.exc_info()[1])\n raise ArgumentError(action,msg)\n \n \n except (TypeError,ValueError):\n name=getattr(action.type,'__name__',repr(action.type))\n args={'type':name,'value':arg_string}\n msg=_('invalid %(type)s value: %(value)r')\n raise ArgumentError(action,msg %args)\n \n \n return result\n \n def _check_value(self,action,value):\n \n if action.choices is not None and value not in action.choices:\n args={'value':value,\n 'choices':', '.join(map(repr,action.choices))}\n msg=_('invalid choice: %(value)r (choose from %(choices)s)')\n raise ArgumentError(action,msg %args)\n \n \n \n \n def format_usage(self):\n formatter=self._get_formatter()\n formatter.add_usage(self.usage,self._actions,\n self._mutually_exclusive_groups)\n return formatter.format_help()\n \n def format_help(self):\n formatter=self._get_formatter()\n \n \n formatter.add_usage(self.usage,self._actions,\n self._mutually_exclusive_groups)\n \n \n formatter.add_text(self.description)\n \n \n for action_group in self._action_groups:\n formatter.start_section(action_group.title)\n formatter.add_text(action_group.description)\n formatter.add_arguments(action_group._group_actions)\n formatter.end_section()\n \n \n formatter.add_text(self.epilog)\n \n \n return formatter.format_help()\n \n def _get_formatter(self):\n return self.formatter_class(prog=self.prog)\n \n \n \n \n def print_usage(self,file=None ):\n if file is None :\n file=_sys.stdout\n self._print_message(self.format_usage(),file)\n \n def print_help(self,file=None ):\n if file is None :\n file=_sys.stdout\n self._print_message(self.format_help(),file)\n \n def _print_message(self,message,file=None ):\n if message:\n if file is None :\n file=_sys.stderr\n file.write(message)\n \n \n \n \n def exit(self,status=0,message=None ):\n if message:\n self._print_message(message,_sys.stderr)\n _sys.exit(status)\n \n def error(self,message):\n ''\n\n\n\n\n\n\n \n self.print_usage(_sys.stderr)\n args={'prog':self.prog,'message':message}\n self.exit(2,_('%(prog)s: error: %(message)s\\n')%args)\n"], "concurrent.futures.thread": [".py", "\n\n\n\"\"\"Implements ThreadPoolExecutor.\"\"\"\n\n__author__='Brian Quinlan (brian@sweetapp.com)'\n\nimport atexit\nfrom concurrent.futures import _base\nimport queue\nimport threading\nimport weakref\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n_threads_queues=weakref.WeakKeyDictionary()\n_shutdown=False\n\ndef _python_exit():\n global _shutdown\n _shutdown=True\n items=list(_threads_queues.items())\n for t,q in items:\n q.put(None )\n for t,q in items:\n t.join()\n \natexit.register(_python_exit)\n\nclass _WorkItem(object):\n def __init__(self,future,fn,args,kwargs):\n self.future=future\n self.fn=fn\n self.args=args\n self.kwargs=kwargs\n \n def run(self):\n if not self.future.set_running_or_notify_cancel():\n return\n \n try :\n result=self.fn(*self.args,**self.kwargs)\n except BaseException as e:\n self.future.set_exception(e)\n else :\n self.future.set_result(result)\n \ndef _worker(executor_reference,work_queue):\n try :\n while True :\n work_item=work_queue.get(block=True )\n if work_item is not None :\n work_item.run()\n \n del work_item\n continue\n executor=executor_reference()\n \n \n \n \n if _shutdown or executor is None or executor._shutdown:\n \n work_queue.put(None )\n return\n del executor\n except BaseException:\n _base.LOGGER.critical('Exception in worker',exc_info=True )\n \nclass ThreadPoolExecutor(_base.Executor):\n def __init__(self,max_workers):\n ''\n\n\n\n\n \n self._max_workers=max_workers\n 
self._work_queue=queue.Queue()\n self._threads=set()\n self._shutdown=False\n self._shutdown_lock=threading.Lock()\n \n def submit(self,fn,*args,**kwargs):\n with self._shutdown_lock:\n if self._shutdown:\n raise RuntimeError('cannot schedule new futures after shutdown')\n \n f=_base.Future()\n w=_WorkItem(f,fn,args,kwargs)\n \n self._work_queue.put(w)\n self._adjust_thread_count()\n return f\n submit.__doc__=_base.Executor.submit.__doc__\n \n def _adjust_thread_count(self):\n \n \n def weakref_cb(_,q=self._work_queue):\n q.put(None )\n \n \n if len(self._threads)',\n'Yury Selivanov ')\n\nimport imp\nimport importlib.machinery\nimport itertools\nimport linecache\nimport os\nimport re\nimport sys\nimport tokenize\nimport types\nimport warnings\nimport functools\nimport builtins\nfrom operator import attrgetter\nfrom collections import namedtuple,OrderedDict\n\n\n\n\ntry :\n from dis import COMPILER_FLAG_NAMES as _flag_names\nexcept ImportError:\n CO_OPTIMIZED,CO_NEWLOCALS=0x1,0x2\n CO_VARARGS,CO_VARKEYWORDS=0x4,0x8\n CO_NESTED,CO_GENERATOR,CO_NOFREE=0x10,0x20,0x40\nelse :\n mod_dict=globals()\n for k,v in _flag_names.items():\n mod_dict[\"CO_\"+v]=k\n \n \nTPFLAGS_IS_ABSTRACT=1 <<20\n\n\ndef ismodule(object):\n ''\n\n\n\n\n \n return isinstance(object,types.ModuleType)\n \ndef isclass(object):\n ''\n\n\n\n \n return isinstance(object,type)\n \ndef ismethod(object):\n ''\n\n\n\n\n\n \n return isinstance(object,types.MethodType)\n \ndef ismethoddescriptor(object):\n ''\n\n\n\n\n\n\n\n\n\n\n\n \n if isclass(object)or ismethod(object)or isfunction(object):\n \n return False\n tp=type(object)\n return hasattr(tp,\"__get__\")and not hasattr(tp,\"__set__\")\n \ndef isdatadescriptor(object):\n ''\n\n\n\n\n\n \n if isclass(object)or ismethod(object)or isfunction(object):\n \n return False\n tp=type(object)\n return hasattr(tp,\"__set__\")and hasattr(tp,\"__get__\")\n \nif hasattr(types,'MemberDescriptorType'):\n\n def ismemberdescriptor(object):\n ''\n\n\n \n return isinstance(object,types.MemberDescriptorType)\nelse :\n\n def ismemberdescriptor(object):\n ''\n\n\n \n return False\n \nif hasattr(types,'GetSetDescriptorType'):\n\n def isgetsetdescriptor(object):\n ''\n\n\n \n return isinstance(object,types.GetSetDescriptorType)\nelse :\n\n def isgetsetdescriptor(object):\n ''\n\n\n \n return False\n \ndef isfunction(object):\n ''\n\n\n\n\n\n\n\n\n \n return isinstance(object,types.FunctionType)\n \ndef isgeneratorfunction(object):\n ''\n\n\n\n \n return bool((isfunction(object)or ismethod(object))and\n object.__code__.co_flags&CO_GENERATOR)\n \ndef isgenerator(object):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n \n return isinstance(object,types.GeneratorType)\n \ndef istraceback(object):\n ''\n\n\n\n\n\n \n return isinstance(object,types.TracebackType)\n \ndef isframe(object):\n ''\n\n\n\n\n\n\n\n\n\n \n return isinstance(object,types.FrameType)\n \ndef iscode(object):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n return isinstance(object,types.CodeType)\n \ndef isbuiltin(object):\n ''\n\n\n\n\n \n return isinstance(object,types.BuiltinFunctionType)\n \ndef isroutine(object):\n ''\n return (isbuiltin(object)\n or isfunction(object)\n or ismethod(object)\n or ismethoddescriptor(object))\n \ndef isabstract(object):\n ''\n return bool(isinstance(object,type)and object.__flags__&TPFLAGS_IS_ABSTRACT)\n \ndef getmembers(object,predicate=None ):\n ''\n \n if isclass(object):\n mro=(object,)+getmro(object)\n else :\n mro=()\n results=[]\n for key in dir(object):\n \n \n for base in mro:\n if key in base.__dict__:\n 
value=base.__dict__[key]\n break\n else :\n try :\n value=getattr(object,key)\n except AttributeError:\n continue\n if not predicate or predicate(value):\n results.append((key,value))\n results.sort()\n return results\n \nAttribute=namedtuple('Attribute','name kind defining_class object')\n\ndef classify_class_attrs(cls):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n mro=getmro(cls)\n names=dir(cls)\n result=[]\n for name in names:\n \n \n \n \n \n \n homecls=None\n for base in (cls,)+mro:\n if name in base.__dict__:\n obj=base.__dict__[name]\n homecls=base\n break\n else :\n obj=getattr(cls,name)\n homecls=getattr(obj,\"__objclass__\",homecls)\n \n \n if isinstance(obj,staticmethod):\n kind=\"static method\"\n elif isinstance(obj,classmethod):\n kind=\"class method\"\n elif isinstance(obj,property):\n kind=\"property\"\n elif ismethoddescriptor(obj):\n kind=\"method\"\n elif isdatadescriptor(obj):\n kind=\"data\"\n else :\n obj_via_getattr=getattr(cls,name)\n if (isfunction(obj_via_getattr)or\n ismethoddescriptor(obj_via_getattr)):\n kind=\"method\"\n else :\n kind=\"data\"\n obj=obj_via_getattr\n \n result.append(Attribute(name,kind,homecls,obj))\n \n return result\n \n \n \ndef getmro(cls):\n ''\n return cls.__mro__\n \n \ndef indentsize(line):\n ''\n expline=line.expandtabs()\n return len(expline)-len(expline.lstrip())\n \ndef getdoc(object):\n ''\n\n\n\n \n try :\n doc=object.__doc__\n except AttributeError:\n return None\n if not isinstance(doc,str):\n return None\n return cleandoc(doc)\n \ndef cleandoc(doc):\n ''\n\n\n \n try :\n lines=doc.expandtabs().split('\\n')\n except UnicodeError:\n return None\n else :\n \n margin=sys.maxsize\n for line in lines[1:]:\n content=len(line.lstrip())\n if content:\n indent=len(line)-content\n margin=min(margin,indent)\n \n if lines:\n lines[0]=lines[0].lstrip()\n if margin 0:\n if pat.match(lines[lnum]):break\n lnum=lnum -1\n return lines,lnum\n raise IOError('could not find code object')\n \ndef getcomments(object):\n ''\n\n\n \n try :\n lines,lnum=findsource(object)\n except (IOError,TypeError):\n return None\n \n if ismodule(object):\n \n start=0\n if lines and lines[0][:2]=='#!':start=1\n while start 0:\n indent=indentsize(lines[lnum])\n end=lnum -1\n if end >=0 and lines[end].lstrip()[:1]=='#'and indentsize(lines[end])==indent:\n comments=[lines[end].expandtabs().lstrip()]\n if end >0:\n end=end -1\n comment=lines[end].expandtabs().lstrip()\n while comment[:1]=='#'and indentsize(lines[end])==indent:\n comments[:0]=[comment]\n end=end -1\n if end <0:break\n comment=lines[end].expandtabs().lstrip()\n while comments and comments[0].strip()=='#':\n comments[:1]=[]\n while comments and comments[-1].strip()=='#':\n comments[-1:]=[]\n return''.join(comments)\n \nclass EndOfBlock(Exception):pass\n\nclass BlockFinder:\n ''\n def __init__(self):\n self.indent=0\n self.islambda=False\n self.started=False\n self.passline=False\n self.last=1\n \n def tokeneater(self,type,token,srowcol,erowcol,line):\n if not self.started:\n \n if token in (\"def\",\"class\",\"lambda\"):\n if token ==\"lambda\":\n self.islambda=True\n self.started=True\n self.passline=True\n elif type ==tokenize.NEWLINE:\n self.passline=False\n self.last=srowcol[0]\n if self.islambda:\n raise EndOfBlock\n elif self.passline:\n pass\n elif type ==tokenize.INDENT:\n self.indent=self.indent+1\n self.passline=True\n elif type ==tokenize.DEDENT:\n self.indent=self.indent -1\n \n \n \n if self.indent <=0:\n raise EndOfBlock\n elif self.indent ==0 and type not in 
(tokenize.COMMENT,tokenize.NL):\n \n \n raise EndOfBlock\n \ndef getblock(lines):\n ''\n blockfinder=BlockFinder()\n try :\n tokens=tokenize.generate_tokens(iter(lines).__next__)\n for _token in tokens:\n blockfinder.tokeneater(*_token)\n except (EndOfBlock,IndentationError):\n pass\n return lines[:blockfinder.last]\n \ndef getsourcelines(object):\n ''\n\n\n\n\n\n \n lines,lnum=findsource(object)\n \n if ismodule(object):return lines,0\n else :return getblock(lines[lnum:]),lnum+1\n \ndef getsource(object):\n ''\n\n\n\n \n lines,lnum=getsourcelines(object)\n return''.join(lines)\n \n \ndef walktree(classes,children,parent):\n ''\n results=[]\n classes.sort(key=attrgetter('__module__','__name__'))\n for c in classes:\n results.append((c,c.__bases__))\n if c in children:\n results.append(walktree(children[c],children,c))\n return results\n \ndef getclasstree(classes,unique=False ):\n ''\n\n\n\n\n\n\n \n children={}\n roots=[]\n for c in classes:\n if c.__bases__:\n for parent in c.__bases__:\n if not parent in children:\n children[parent]=[]\n if c not in children[parent]:\n children[parent].append(c)\n if unique and parent in classes:break\n elif c not in roots:\n roots.append(c)\n for parent in children:\n if parent not in classes:\n roots.append(parent)\n return walktree(roots,children,None )\n \n \nArguments=namedtuple('Arguments','args, varargs, varkw')\n\ndef getargs(co):\n ''\n\n\n\n\n \n args,varargs,kwonlyargs,varkw=_getfullargs(co)\n return Arguments(args+kwonlyargs,varargs,varkw)\n \ndef _getfullargs(co):\n ''\n\n\n\n \n \n if not iscode(co):\n raise TypeError('{!r} is not a code object'.format(co))\n \n nargs=co.co_argcount\n names=co.co_varnames\n nkwargs=co.co_kwonlyargcount\n args=list(names[:nargs])\n kwonlyargs=list(names[nargs:nargs+nkwargs])\n step=0\n \n nargs +=nkwargs\n varargs=None\n if co.co_flags&CO_VARARGS:\n varargs=co.co_varnames[nargs]\n nargs=nargs+1\n varkw=None\n if co.co_flags&CO_VARKEYWORDS:\n varkw=co.co_varnames[nargs]\n return args,varargs,kwonlyargs,varkw\n \n \nArgSpec=namedtuple('ArgSpec','args varargs keywords defaults')\n\ndef getargspec(func):\n ''\n\n\n\n\n\n\n\n\n\n\n \n \n args,varargs,varkw,defaults,kwonlyargs,kwonlydefaults,ann= getfullargspec(func)\n if kwonlyargs or ann:\n raise ValueError(\"Function has keyword-only arguments or annotations\"\n \", use getfullargspec() API which can support them\")\n return ArgSpec(args,varargs,varkw,defaults)\n \nFullArgSpec=namedtuple('FullArgSpec',\n'args, varargs, varkw, defaults, kwonlyargs, kwonlydefaults, annotations')\n\ndef getfullargspec(func):\n ''\n\n\n\n\n\n\n\n\n\n\n\n \n \n if ismethod(func):\n func=func.__func__\n if not isfunction(func):\n raise TypeError('{!r} is not a Python function'.format(func))\n args,varargs,kwonlyargs,varkw=_getfullargs(func.__code__)\n return FullArgSpec(args,varargs,varkw,func.__defaults__,\n kwonlyargs,func.__kwdefaults__,func.__annotations__)\n \nArgInfo=namedtuple('ArgInfo','args varargs keywords locals')\n\ndef getargvalues(frame):\n ''\n\n\n\n\n \n args,varargs,varkw=getargs(frame.f_code)\n return ArgInfo(args,varargs,varkw,frame.f_locals)\n \ndef formatannotation(annotation,base_module=None ):\n if isinstance(annotation,type):\n if annotation.__module__ in ('builtins',base_module):\n return annotation.__name__\n return annotation.__module__+'.'+annotation.__name__\n return repr(annotation)\n \ndef formatannotationrelativeto(object):\n module=getattr(object,'__module__',None )\n def _formatannotation(annotation):\n return formatannotation(annotation,module)\n 
return _formatannotation\n \n \ndef formatargspec(args,varargs=None ,varkw=None ,defaults=None ,\nkwonlyargs=(),kwonlydefaults={},annotations={},\nformatarg=str,\nformatvarargs=lambda name:'*'+name,\nformatvarkw=lambda name:'**'+name,\nformatvalue=lambda value:'='+repr(value),\nformatreturns=lambda text:' -> '+text,\nformatannotation=formatannotation):\n ''\n\n\n\n\n\n\n \n def formatargandannotation(arg):\n result=formatarg(arg)\n if arg in annotations:\n result +=': '+formatannotation(annotations[arg])\n return result\n specs=[]\n if defaults:\n firstdefault=len(args)-len(defaults)\n for i,arg in enumerate(args):\n spec=formatargandannotation(arg)\n if defaults and i >=firstdefault:\n spec=spec+formatvalue(defaults[i -firstdefault])\n specs.append(spec)\n if varargs is not None :\n specs.append(formatvarargs(formatargandannotation(varargs)))\n else :\n if kwonlyargs:\n specs.append('*')\n if kwonlyargs:\n for kwonlyarg in kwonlyargs:\n spec=formatargandannotation(kwonlyarg)\n if kwonlydefaults and kwonlyarg in kwonlydefaults:\n spec +=formatvalue(kwonlydefaults[kwonlyarg])\n specs.append(spec)\n if varkw is not None :\n specs.append(formatvarkw(formatargandannotation(varkw)))\n result='('+', '.join(specs)+')'\n if'return'in annotations:\n result +=formatreturns(formatannotation(annotations['return']))\n return result\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \ndef _missing_arguments(f_name,argnames,pos,values):\n names=[repr(name)for name in argnames if name not in values]\n missing=len(names)\n if missing ==1:\n s=names[0]\n elif missing ==2:\n s=\"{} and {}\".format(*names)\n else :\n tail=\", {} and {}\".format(names[-2:])\n del names[-2:]\n s=\", \".join(names)+tail\n raise TypeError(\"%s() missing %i required %s argument%s: %s\"%\n (f_name,missing,\n \"positional\"if pos else\"keyword-only\",\n \"\"if missing ==1 else\"s\",s))\n \ndef _too_many(f_name,args,kwonly,varargs,defcount,given,values):\n atleast=len(args)-defcount\n kwonly_given=len([arg for arg in kwonly if arg in values])\n if varargs:\n plural=atleast !=1\n sig=\"at least %d\"%(atleast,)\n elif defcount:\n plural=True\n sig=\"from %d to %d\"%(atleast,len(args))\n else :\n plural=len(args)!=1\n sig=str(len(args))\n kwonly_sig=\"\"\n if kwonly_given:\n msg=\" positional argument%s (and %d keyword-only argument%s)\"\n kwonly_sig=(msg %(\"s\"if given !=1 else\"\",kwonly_given,\n \"s\"if kwonly_given !=1 else\"\"))\n raise TypeError(\"%s() takes %s positional argument%s but %d%s %s given\"%\n (f_name,sig,\"s\"if plural else\"\",given,kwonly_sig,\n \"was\"if given ==1 and not kwonly_given else\"were\"))\n \ndef getcallargs(func,*positional,**named):\n ''\n\n\n\n \n spec=getfullargspec(func)\n args,varargs,varkw,defaults,kwonlyargs,kwonlydefaults,ann=spec\n f_name=func.__name__\n arg2value={}\n \n \n if ismethod(func)and func.__self__ is not None :\n \n positional=(func.__self__,)+positional\n num_pos=len(positional)\n num_args=len(args)\n num_defaults=len(defaults)if defaults else 0\n \n n=min(num_pos,num_args)\n for i in range(n):\n arg2value[args[i]]=positional[i]\n if varargs:\n arg2value[varargs]=tuple(positional[n:])\n possible_kwargs=set(args+kwonlyargs)\n if varkw:\n arg2value[varkw]={}\n for kw,value in named.items():\n if kw not in possible_kwargs:\n if not varkw:\n raise TypeError(\"%s() got an unexpected keyword argument %r\"%\n (f_name,kw))\n arg2value[varkw][kw]=value\n continue\n if kw in arg2value:\n raise TypeError(\"%s() got multiple values for argument %r\"%\n (f_name,kw))\n 
arg2value[kw]=value\n if num_pos >num_args and not varargs:\n _too_many(f_name,args,kwonlyargs,varargs,num_defaults,\n num_pos,arg2value)\n if num_pos 0:\n start=lineno -1 -context //2\n try :\n lines,lnum=findsource(frame)\n except IOError:\n lines=index=None\n else :\n start=max(start,1)\n start=max(0,min(start,len(lines)-context))\n lines=lines[start:start+context]\n index=lineno -1 -start\n else :\n lines=index=None\n \n return Traceback(filename,lineno,frame.f_code.co_name,lines,index)\n \ndef getlineno(frame):\n ''\n \n return frame.f_lineno\n \ndef getouterframes(frame,context=1):\n ''\n\n\n \n framelist=[]\n while frame:\n framelist.append((frame,)+getframeinfo(frame,context))\n frame=frame.f_back\n return framelist\n \ndef getinnerframes(tb,context=1):\n ''\n\n\n \n framelist=[]\n while tb:\n framelist.append((tb.tb_frame,)+getframeinfo(tb,context))\n tb=tb.tb_next\n return framelist\n \ndef currentframe():\n ''\n return sys._getframe(1)if hasattr(sys,\"_getframe\")else None\n \ndef stack(context=1):\n ''\n return getouterframes(sys._getframe(1),context)\n \ndef trace(context=1):\n ''\n return getinnerframes(sys.exc_info()[2],context)\n \n \n \n \n_sentinel=object()\n\ndef _static_getmro(klass):\n return type.__dict__['__mro__'].__get__(klass)\n \ndef _check_instance(obj,attr):\n instance_dict={}\n try :\n instance_dict=object.__getattribute__(obj,\"__dict__\")\n except AttributeError:\n pass\n return dict.get(instance_dict,attr,_sentinel)\n \n \ndef _check_class(klass,attr):\n for entry in _static_getmro(klass):\n if _shadowed_dict(type(entry))is _sentinel:\n try :\n return entry.__dict__[attr]\n except KeyError:\n pass\n return _sentinel\n \ndef _is_type(obj):\n try :\n _static_getmro(obj)\n except TypeError:\n return False\n return True\n \ndef _shadowed_dict(klass):\n dict_attr=type.__dict__[\"__dict__\"]\n for entry in _static_getmro(klass):\n try :\n class_dict=dict_attr.__get__(entry)[\"__dict__\"]\n except KeyError:\n pass\n else :\n if not (type(class_dict)is types.GetSetDescriptorType and\n class_dict.__name__ ==\"__dict__\"and\n class_dict.__objclass__ is entry):\n return class_dict\n return _sentinel\n \ndef getattr_static(obj,attr,default=_sentinel):\n ''\n\n\n\n\n\n\n\n\n \n instance_result=_sentinel\n if not _is_type(obj):\n klass=type(obj)\n dict_attr=_shadowed_dict(klass)\n if (dict_attr is _sentinel or\n type(dict_attr)is types.MemberDescriptorType):\n instance_result=_check_instance(obj,attr)\n else :\n klass=obj\n \n klass_result=_check_class(klass,attr)\n \n if instance_result is not _sentinel and klass_result is not _sentinel:\n if (_check_class(type(klass_result),'__get__')is not _sentinel and\n _check_class(type(klass_result),'__set__')is not _sentinel):\n return klass_result\n \n if instance_result is not _sentinel:\n return instance_result\n if klass_result is not _sentinel:\n return klass_result\n \n if obj is klass:\n \n for entry in _static_getmro(type(klass)):\n if _shadowed_dict(type(entry))is _sentinel:\n try :\n return entry.__dict__[attr]\n except KeyError:\n pass\n if default is not _sentinel:\n return default\n raise AttributeError(attr)\n \n \n \n \nGEN_CREATED='GEN_CREATED'\nGEN_RUNNING='GEN_RUNNING'\nGEN_SUSPENDED='GEN_SUSPENDED'\nGEN_CLOSED='GEN_CLOSED'\n\ndef getgeneratorstate(generator):\n ''\n\n\n\n\n\n\n \n if generator.gi_running:\n return GEN_RUNNING\n if generator.gi_frame is None :\n return GEN_CLOSED\n if generator.gi_frame.f_lasti ==-1:\n return GEN_CREATED\n return GEN_SUSPENDED\n \n \ndef getgeneratorlocals(generator):\n 
''\n\n\n\n \n \n if not isgenerator(generator):\n raise TypeError(\"'{!r}' is not a Python generator\".format(generator))\n \n frame=getattr(generator,\"gi_frame\",None )\n if frame is not None :\n return generator.gi_frame.f_locals\n else :\n return {}\n \n \n \n \n \n \n_WrapperDescriptor=type(type.__call__)\n_MethodWrapper=type(all.__call__)\n\n_NonUserDefinedCallables=(_WrapperDescriptor,\n_MethodWrapper,\ntypes.BuiltinFunctionType)\n\n\ndef _get_user_defined_method(cls,method_name):\n try :\n meth=getattr(cls,method_name)\n except AttributeError:\n return\n else :\n if not isinstance(meth,_NonUserDefinedCallables):\n \n \n return meth\n \n \ndef signature(obj):\n ''\n \n if not callable(obj):\n raise TypeError('{!r} is not a callable object'.format(obj))\n \n if isinstance(obj,types.MethodType):\n \n \n sig=signature(obj.__func__)\n return sig.replace(parameters=tuple(sig.parameters.values())[1:])\n \n try :\n sig=obj.__signature__\n except AttributeError:\n pass\n else :\n if sig is not None :\n return sig\n \n try :\n \n wrapped=obj.__wrapped__\n except AttributeError:\n pass\n else :\n return signature(wrapped)\n \n if isinstance(obj,types.FunctionType):\n return Signature.from_function(obj)\n \n if isinstance(obj,functools.partial):\n sig=signature(obj.func)\n \n new_params=OrderedDict(sig.parameters.items())\n \n partial_args=obj.args or ()\n partial_keywords=obj.keywords or {}\n try :\n ba=sig.bind_partial(*partial_args,**partial_keywords)\n except TypeError as ex:\n msg='partial object {!r} has incorrect arguments'.format(obj)\n raise ValueError(msg)from ex\n \n for arg_name,arg_value in ba.arguments.items():\n param=new_params[arg_name]\n if arg_name in partial_keywords:\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n new_params[arg_name]=param.replace(default=arg_value,\n _partial_kwarg=True )\n \n elif (param.kind not in (_VAR_KEYWORD,_VAR_POSITIONAL)and\n not param._partial_kwarg):\n new_params.pop(arg_name)\n \n return sig.replace(parameters=new_params.values())\n \n sig=None\n if isinstance(obj,type):\n \n \n \n \n call=_get_user_defined_method(type(obj),'__call__')\n if call is not None :\n sig=signature(call)\n else :\n \n new=_get_user_defined_method(obj,'__new__')\n if new is not None :\n sig=signature(new)\n else :\n \n init=_get_user_defined_method(obj,'__init__')\n if init is not None :\n sig=signature(init)\n elif not isinstance(obj,_NonUserDefinedCallables):\n \n \n \n \n call=_get_user_defined_method(type(obj),'__call__')\n if call is not None :\n sig=signature(call)\n \n if sig is not None :\n \n \n return sig.replace(parameters=tuple(sig.parameters.values())[1:])\n \n if isinstance(obj,types.BuiltinFunctionType):\n \n msg='no signature found for builtin function {!r}'.format(obj)\n raise ValueError(msg)\n \n raise ValueError('callable {!r} is not supported by signature'.format(obj))\n \n \nclass _void:\n ''\n \n \nclass _empty:\n pass\n \n \nclass _ParameterKind(int):\n def __new__(self,*args,name):\n obj=int.__new__(self,*args)\n obj._name=name\n return obj\n \n def __str__(self):\n return self._name\n \n def __repr__(self):\n return'<_ParameterKind: {!r}>'.format(self._name)\n \n \n_POSITIONAL_ONLY=_ParameterKind(0,name='POSITIONAL_ONLY')\n_POSITIONAL_OR_KEYWORD=_ParameterKind(1,name='POSITIONAL_OR_KEYWORD')\n_VAR_POSITIONAL=_ParameterKind(2,name='VAR_POSITIONAL')\n_KEYWORD_ONLY=_ParameterKind(3,name='KEYWORD_ONLY')\n_VAR_KEYWORD=_ParameterKind(4,name='VAR_KEYWORD')\n\n\nclass Parameter:\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n 
__slots__=('_name','_kind','_default','_annotation','_partial_kwarg')\n \n POSITIONAL_ONLY=_POSITIONAL_ONLY\n POSITIONAL_OR_KEYWORD=_POSITIONAL_OR_KEYWORD\n VAR_POSITIONAL=_VAR_POSITIONAL\n KEYWORD_ONLY=_KEYWORD_ONLY\n VAR_KEYWORD=_VAR_KEYWORD\n \n empty=_empty\n \n def __init__(self,name,kind,*,default=_empty,annotation=_empty,\n _partial_kwarg=False ):\n \n if kind not in (_POSITIONAL_ONLY,_POSITIONAL_OR_KEYWORD,\n _VAR_POSITIONAL,_KEYWORD_ONLY,_VAR_KEYWORD):\n raise ValueError(\"invalid value for 'Parameter.kind' attribute\")\n self._kind=kind\n \n if default is not _empty:\n if kind in (_VAR_POSITIONAL,_VAR_KEYWORD):\n msg='{} parameters cannot have default values'.format(kind)\n raise ValueError(msg)\n self._default=default\n self._annotation=annotation\n \n if name is None :\n if kind !=_POSITIONAL_ONLY:\n raise ValueError(\"None is not a valid name for a \"\n \"non-positional-only parameter\")\n self._name=name\n else :\n name=str(name)\n if kind !=_POSITIONAL_ONLY and not name.isidentifier():\n msg='{!r} is not a valid parameter name'.format(name)\n raise ValueError(msg)\n self._name=name\n \n self._partial_kwarg=_partial_kwarg\n \n @property\n def name(self):\n return self._name\n \n @property\n def default(self):\n return self._default\n \n @property\n def annotation(self):\n return self._annotation\n \n @property\n def kind(self):\n return self._kind\n \n def replace(self,*,name=_void,kind=_void,annotation=_void,\n default=_void,_partial_kwarg=_void):\n ''\n \n if name is _void:\n name=self._name\n \n if kind is _void:\n kind=self._kind\n \n if annotation is _void:\n annotation=self._annotation\n \n if default is _void:\n default=self._default\n \n if _partial_kwarg is _void:\n _partial_kwarg=self._partial_kwarg\n \n return type(self)(name,kind,default=default,annotation=annotation,\n _partial_kwarg=_partial_kwarg)\n \n def __str__(self):\n kind=self.kind\n \n formatted=self._name\n if kind ==_POSITIONAL_ONLY:\n if formatted is None :\n formatted=''\n formatted='<{}>'.format(formatted)\n \n \n if self._annotation is not _empty:\n formatted='{}:{}'.format(formatted,\n formatannotation(self._annotation))\n \n if self._default is not _empty:\n formatted='{}={}'.format(formatted,repr(self._default))\n \n if kind ==_VAR_POSITIONAL:\n formatted='*'+formatted\n elif kind ==_VAR_KEYWORD:\n formatted='**'+formatted\n \n return formatted\n \n def __repr__(self):\n return'<{} at {:#x} {!r}>'.format(self.__class__.__name__,\n id(self),self.name)\n \n def __eq__(self,other):\n return (issubclass(other.__class__,Parameter)and\n self._name ==other._name and\n self._kind ==other._kind and\n self._default ==other._default and\n self._annotation ==other._annotation)\n \n def __ne__(self,other):\n return not self.__eq__(other)\n \n \nclass BoundArguments:\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n def __init__(self,signature,arguments):\n self.arguments=arguments\n self._signature=signature\n \n @property\n def signature(self):\n return self._signature\n \n @property\n def args(self):\n args=[]\n for param_name,param in self._signature.parameters.items():\n if (param.kind in (_VAR_KEYWORD,_KEYWORD_ONLY)or\n param._partial_kwarg):\n \n \n \n \n break\n \n try :\n arg=self.arguments[param_name]\n except KeyError:\n \n \n break\n else :\n if param.kind ==_VAR_POSITIONAL:\n \n args.extend(arg)\n else :\n \n args.append(arg)\n \n return tuple(args)\n \n @property\n def kwargs(self):\n kwargs={}\n kwargs_started=False\n for param_name,param in self._signature.parameters.items():\n if not kwargs_started:\n 
if (param.kind in (_VAR_KEYWORD,_KEYWORD_ONLY)or\n param._partial_kwarg):\n kwargs_started=True\n else :\n if param_name not in self.arguments:\n kwargs_started=True\n continue\n \n if not kwargs_started:\n continue\n \n try :\n arg=self.arguments[param_name]\n except KeyError:\n pass\n else :\n if param.kind ==_VAR_KEYWORD:\n \n kwargs.update(arg)\n else :\n \n kwargs[param_name]=arg\n \n return kwargs\n \n def __eq__(self,other):\n return (issubclass(other.__class__,BoundArguments)and\n self.signature ==other.signature and\n self.arguments ==other.arguments)\n \n def __ne__(self,other):\n return not self.__eq__(other)\n \n \nclass Signature:\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n __slots__=('_return_annotation','_parameters')\n \n _parameter_cls=Parameter\n _bound_arguments_cls=BoundArguments\n \n empty=_empty\n \n def __init__(self,parameters=None ,*,return_annotation=_empty,\n __validate_parameters__=True ):\n ''\n\n \n \n if parameters is None :\n params=OrderedDict()\n else :\n if __validate_parameters__:\n params=OrderedDict()\n top_kind=_POSITIONAL_ONLY\n \n for idx,param in enumerate(parameters):\n kind=param.kind\n if kind \",symbol=\"single\"):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n return _maybe_compile(_compile,source,filename,symbol)\n \nclass Compile:\n ''\n\n\n \n def __init__(self):\n self.flags=PyCF_DONT_IMPLY_DEDENT\n \n def __call__(self,source,filename,symbol):\n codeob=compile(source,filename,symbol,self.flags,1)\n for feature in _features:\n if codeob.co_flags&feature.compiler_flag:\n self.flags |=feature.compiler_flag\n return codeob\n \nclass CommandCompiler:\n ''\n\n\n\n \n \n def __init__(self,):\n self.compiler=Compile()\n \n def __call__(self,source,filename=\"\",symbol=\"single\"):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n return _maybe_compile(self.compiler,source,filename,symbol)\n"], "xml.sax.handler": [".py", "''\n\n\n\n\n\n\n\n\n\n\nversion='2.0beta'\n\n\n\n\n\n\n\n\n\nclass ErrorHandler:\n ''\n\n\n\n\n\n\n \n \n def error(self,exception):\n ''\n raise exception\n \n def fatalError(self,exception):\n ''\n raise exception\n \n def warning(self,exception):\n ''\n print(exception)\n \n \n \n \nclass ContentHandler:\n ''\n\n\n\n \n \n def __init__(self):\n self._locator=None\n \n def setDocumentLocator(self,locator):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n self._locator=locator\n \n def startDocument(self):\n ''\n\n\n\n \n \n def endDocument(self):\n ''\n\n\n\n\n\n \n \n def startPrefixMapping(self,prefix,uri):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n def endPrefixMapping(self,prefix):\n ''\n\n\n\n \n \n def startElement(self,name,attrs):\n ''\n\n\n\n\n \n \n def endElement(self,name):\n ''\n\n\n \n \n def startElementNS(self,name,qname,attrs):\n ''\n\n\n\n\n\n\n\n\n \n \n def endElementNS(self,name,qname):\n ''\n\n\n \n \n def characters(self,content):\n ''\n\n\n\n\n\n\n \n \n def ignorableWhitespace(self,whitespace):\n ''\n\n\n\n\n\n\n\n\n\n\n \n \n def processingInstruction(self,target,data):\n ''\n\n\n\n\n\n\n\n \n \n def skippedEntity(self,name):\n ''\n\n\n\n\n\n\n\n\n \n \n \n \n \nclass DTDHandler:\n ''\n\n\n \n \n def notationDecl(self,name,publicId,systemId):\n ''\n \n def unparsedEntityDecl(self,name,publicId,systemId,ndata):\n ''\n \n \n \n \nclass EntityResolver:\n ''\n\n\n\n \n \n def resolveEntity(self,publicId,systemId):\n ''\n\n \n return systemId\n \n \n \n \n \n \n \n 
\nfeature_namespaces=\"http://xml.org/sax/features/namespaces\"\n\n\n\n\n\nfeature_namespace_prefixes=\"http://xml.org/sax/features/namespace-prefixes\"\n\n\n\n\n\n\nfeature_string_interning=\"http://xml.org/sax/features/string-interning\"\n\n\n\n\n\nfeature_validation=\"http://xml.org/sax/features/validation\"\n\n\n\n\n\nfeature_external_ges=\"http://xml.org/sax/features/external-general-entities\"\n\n\n\n\nfeature_external_pes=\"http://xml.org/sax/features/external-parameter-entities\"\n\n\n\n\n\n\nall_features=[feature_namespaces,\nfeature_namespace_prefixes,\nfeature_string_interning,\nfeature_validation,\nfeature_external_ges,\nfeature_external_pes]\n\n\n\n\n\n\n\n\nproperty_lexical_handler=\"http://xml.org/sax/properties/lexical-handler\"\n\n\n\n\nproperty_declaration_handler=\"http://xml.org/sax/properties/declaration-handler\"\n\n\n\n\n\nproperty_dom_node=\"http://xml.org/sax/properties/dom-node\"\n\n\n\n\n\n\nproperty_xml_string=\"http://xml.org/sax/properties/xml-string\"\n\n\n\n\n\nproperty_encoding=\"http://www.python.org/sax/properties/encoding\"\n\n\n\n\n\n\n\n\n\n\nproperty_interning_dict=\"http://www.python.org/sax/properties/interning-dict\"\n\n\n\n\n\n\n\nall_properties=[property_lexical_handler,\nproperty_dom_node,\nproperty_declaration_handler,\nproperty_xml_string,\nproperty_encoding,\nproperty_interning_dict]\n"], "random": [".js", "// Javascript implementation of the random module\n// Based on Ian Bicking's implementation of the Mersenne twister\n\nvar $module = (function($B){\n\n_b_ = $B.builtins\n\nvar VERSION = 3\n\n// Code copied from https://github.com/ianb/whrandom/blob/master/mersenne.js\n// by Ian Bicking\n\n// this program is a JavaScript version of Mersenne Twister,\n// a straight conversion from the original program, mt19937ar.c,\n// translated by y. okada on july 17, 2006.\n// and modified a little at july 20, 2006, but there are not any substantial differences.\n// modularized by Ian Bicking, March 25, 2013 (found original version at http://www.math.sci.hiroshima-u.ac.jp/~m-mat/MT/VERSIONS/JAVASCRIPT/java-script.html)\n// in this program, procedure descriptions and comments of original source code were not removed.\n// lines commented with //c// were originally descriptions of c procedure. and a few following lines are appropriate JavaScript descriptions.\n// lines commented with /* and */ are original comments.\n// lines commented with // are additional comments in this JavaScript version.\n/*\n A C-program for MT19937, with initialization improved 2002/1/26.\n Coded by Takuji Nishimura and Makoto Matsumoto.\n\n Before using, initialize the state by using init_genrand(seed)\n or init_by_array(init_key, key_length).\n\n Copyright (C) 1997 - 2002, Makoto Matsumoto and Takuji Nishimura,\n All rights reserved.\n\n Redistribution and use in source and binary forms, with or without\n modification, are permitted provided that the following conditions\n are met:\n\n 1. Redistributions of source code must retain the above copyright\n notice, this list of conditions and the following disclaimer.\n\n 2. Redistributions in binary form must reproduce the above copyright\n notice, this list of conditions and the following disclaimer in the\n documentation and/or other materials provided with the distribution.\n\n 3. 
The names of its contributors may not be used to endorse or promote\n products derived from this software without specific prior written\n permission.\n\n THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR\n CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,\n EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,\n PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR\n PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF\n LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING\n NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS\n SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n\n Any feedback is very welcome.\n http://www.math.sci.hiroshima-u.ac.jp/~m-mat/MT/emt.html\n email: m-mat @ math.sci.hiroshima-u.ac.jp (remove space)\n*/\n\nfunction RandomStream(seed) {\n\n /*jshint bitwise:false */\n /* Period parameters */\n //c//#define N 624\n //c//#define M 397\n //c//#define MATRIX_A 0x9908b0dfUL /* constant vector a */\n //c//#define UPPER_MASK 0x80000000UL /* most significant w-r bits */\n //c//#define LOWER_MASK 0x7fffffffUL /* least significant r bits */\n var N = 624;\n var M = 397;\n var MATRIX_A = 0x9908b0df; /* constant vector a */\n var UPPER_MASK = 0x80000000; /* most significant w-r bits */\n var LOWER_MASK = 0x7fffffff; /* least significant r bits */\n //c//static unsigned long mt[N]; /* the array for the state vector */\n //c//static int mti=N+1; /* mti==N+1 means mt[N] is not initialized */\n var mt = new Array(N); /* the array for the state vector */\n var mti = N+1; /* mti==N+1 means mt[N] is not initialized */\n\n function unsigned32 (n1) // returns a 32-bits unsiged integer from an operand to which applied a bit operator.\n {\n return n1 < 0 ? (n1 ^ UPPER_MASK) + UPPER_MASK : n1;\n }\n\n function subtraction32 (n1, n2) // emulates lowerflow of a c 32-bits unsiged integer variable, instead of the operator -. these both arguments must be non-negative integers expressible using unsigned 32 bits.\n {\n return n1 < n2 ? unsigned32((0x100000000 - (n2 - n1)) & 0xffffffff) : n1 - n2;\n }\n\n function addition32 (n1, n2) // emulates overflow of a c 32-bits unsiged integer variable, instead of the operator +. these both arguments must be non-negative integers expressible using unsigned 32 bits.\n {\n return unsigned32((n1 + n2) & 0xffffffff);\n }\n\n function multiplication32 (n1, n2) // emulates overflow of a c 32-bits unsiged integer variable, instead of the operator *. these both arguments must be non-negative integers expressible using unsigned 32 bits.\n {\n var sum = 0;\n for (var i = 0; i < 32; ++i){\n if ((n1 >>> i) & 0x1){\n sum = addition32(sum, unsigned32(n2 << i));\n }\n }\n return sum;\n }\n\n /* initializes mt[N] with a seed */\n //c//void init_genrand(unsigned long s)\n function init_genrand(s) {\n //c//mt[0]= s & 0xffffffff;\n mt[0]= unsigned32(s & 0xffffffff);\n for (mti=1; mti> 30)) + mti);\n addition32(multiplication32(1812433253, unsigned32(mt[mti-1] ^ (mt[mti-1] >>> 30))), mti);\n /* See Knuth TAOCP Vol2. 3rd Ed. P.106 for multiplier. */\n /* In the previous versions, MSBs of the seed affect */\n /* only MSBs of the array mt[]. 
*/\n /* 2002/01/09 modified by Makoto Matsumoto */\n //c//mt[mti] &= 0xffffffff;\n mt[mti] = unsigned32(mt[mti] & 0xffffffff);\n /* for >32 bit machines */\n }\n }\n\n /* initialize by an array with array-length */\n /* init_key is the array for initializing keys */\n /* key_length is its length */\n /* slight change for C++, 2004/2/26 */\n //c//void init_by_array(unsigned long init_key[], int key_length)\n function init_by_array(init_key, key_length) {\n //c//int i, j, k;\n var i, j, k;\n init_genrand(19650218);\n i=1; j=0;\n k = (N>key_length ? N : key_length);\n for (; k; k--) {\n //c//mt[i] = (mt[i] ^ ((mt[i-1] ^ (mt[i-1] >> 30)) * 1664525))\n //c// + init_key[j] + j; /* non linear */\n mt[i] = addition32(addition32(unsigned32(mt[i] ^ multiplication32(unsigned32(mt[i-1] ^ (mt[i-1] >>> 30)), 1664525)), init_key[j]), j);\n mt[i] =\n //c//mt[i] &= 0xffffffff; /* for WORDSIZE > 32 machines */\n unsigned32(mt[i] & 0xffffffff);\n i++; j++;\n if (i>=N) { mt[0] = mt[N-1]; i=1; }\n if (j>=key_length) {\n j=0;\n }\n }\n for (k=N-1; k; k--) {\n //c//mt[i] = (mt[i] ^ ((mt[i-1] ^ (mt[i-1] >> 30)) * 1566083941))\n //c//- i; /* non linear */\n mt[i] = subtraction32(unsigned32((mt[i]) ^ multiplication32(unsigned32(mt[i-1] ^ (mt[i-1] >>> 30)), 1566083941)), i);\n //c//mt[i] &= 0xffffffff; /* for WORDSIZE > 32 machines */\n mt[i] = unsigned32(mt[i] & 0xffffffff);\n i++;\n if (i>=N) { mt[0] = mt[N-1]; i=1; }\n }\n mt[0] = 0x80000000; /* MSB is 1; assuring non-zero initial array */\n }\n\n /* generates a random number on [0,0xffffffff]-interval */\n //c//unsigned long genrand_int32(void)\n function genrand_int32() {\n //c//unsigned long y;\n //c//static unsigned long mag01[2]={0x0UL, MATRIX_A};\n var y;\n var mag01 = [0x0, MATRIX_A];\n /* mag01[x] = x * MATRIX_A for x=0,1 */\n\n if (mti >= N) { /* generate N words at one time */\n //c//int kk;\n var kk;\n\n if (mti == N+1) { /* if init_genrand() has not been called, */\n init_genrand(Date.now()); /* a default initial seed is used */\n }\n\n for (kk=0;kk> 1) ^ mag01[y & 0x1];\n y = unsigned32((mt[kk]&UPPER_MASK)|(mt[kk+1]&LOWER_MASK));\n mt[kk] = unsigned32(mt[kk+M] ^ (y >>> 1) ^ mag01[y & 0x1]);\n }\n for (;kk> 1) ^ mag01[y & 0x1];\n y = unsigned32((mt[kk]&UPPER_MASK)|(mt[kk+1]&LOWER_MASK));\n mt[kk] = unsigned32(mt[kk+(M-N)] ^ (y >>> 1) ^ mag01[y & 0x1]);\n }\n //c//y = (mt[N-1]&UPPER_MASK)|(mt[0]&LOWER_MASK);\n //c//mt[N-1] = mt[M-1] ^ (y >> 1) ^ mag01[y & 0x1];\n y = unsigned32((mt[N-1]&UPPER_MASK)|(mt[0]&LOWER_MASK));\n mt[N-1] = unsigned32(mt[M-1] ^ (y >>> 1) ^ mag01[y & 0x1]);\n mti = 0;\n }\n\n y = mt[mti++];\n\n /* Tempering */\n //c//y ^= (y >> 11);\n //c//y ^= (y << 7) & 0x9d2c5680;\n //c//y ^= (y << 15) & 0xefc60000;\n //c//y ^= (y >> 18);\n y = unsigned32(y ^ (y >>> 11));\n y = unsigned32(y ^ ((y << 7) & 0x9d2c5680));\n y = unsigned32(y ^ ((y << 15) & 0xefc60000));\n y = unsigned32(y ^ (y >>> 18));\n\n return y;\n }\n\n /* generates a random number on [0,0x7fffffff]-interval */\n //c//long genrand_int31(void)\n function genrand_int31() {\n //c//return (genrand_int32()>>1);\n return (genrand_int32()>>>1);\n }\n\n /* generates a random number on [0,1]-real-interval */\n //c//double genrand_real1(void)\n function genrand_real1() {\n return genrand_int32()*(1.0/4294967295.0);\n /* divided by 2^32-1 */\n }\n\n /* generates a random number on [0,1)-real-interval */\n //c//double genrand_real2(void)\n function genrand_real2() {\n return genrand_int32()*(1.0/4294967296.0);\n /* divided by 2^32 */\n }\n\n /* generates a random number on 
(0,1)-real-interval */\n //c//double genrand_real3(void)\n function genrand_real3() {\n return ((genrand_int32()) + 0.5)*(1.0/4294967296.0);\n /* divided by 2^32 */\n }\n\n /* generates a random number on [0,1) with 53-bit resolution*/\n //c//double genrand_res53(void)\n function genrand_res53() {\n //c//unsigned long a=genrand_int32()>>5, b=genrand_int32()>>6;\n var a=genrand_int32()>>>5, b=genrand_int32()>>>6;\n return (a*67108864.0+b)*(1.0/9007199254740992.0);\n }\n /* These real versions are due to Isaku Wada, 2002/01/09 added */\n\n var random = genrand_res53;\n\n random.seed = function (seed) {\n if (! seed) {\n seed = Date.now();\n }\n if (typeof seed != \"number\") {\n seed = parseInt(seed, 10);\n }\n if ((seed !== 0 && ! seed) || isNaN(seed)) {\n throw \"Bad seed\";\n }\n init_genrand(seed);\n };\n\n random.seed(seed);\n\n random.int31 = genrand_int31;\n random.real1 = genrand_real1;\n random.real2 = genrand_real2;\n random.real3 = genrand_real3;\n random.res53 = genrand_res53;\n \n // Added for compatibility with Python\n random.getstate = function(){return [VERSION, mt, mti]}\n \n random.setstate = function(state){\n mt = state[1]\n mti = state[2]\n }\n\n return random;\n\n}\n\nfunction _Random(){\n var _random = RandomStream()\n \n _b_ = $B.builtins\n \n var NV_MAGICCONST = 4 * Math.exp(-0.5)/Math.sqrt(2),\n gauss_next = null\n \n function _randbelow(x){\n return Math.floor(x*_random())\n }\n \n function _urandom(n){\n /*\n urandom(n) -> str\n Return n random bytes suitable for cryptographic use.\n */\n \n var randbytes= []\n for(i=0;i 0 and beta > 0.\n \n The probability distribution function is:\n \n x ** (alpha - 1) * math.exp(-x / beta)\n pdf(x) = --------------------------------------\n math.gamma(alpha) * beta ** alpha\n \n */\n \n // alpha > 0, beta > 0, mean is alpha*beta, variance is alpha*beta**2\n \n // Warning: a few older sources define the gamma distribution in terms\n // of alpha > -1.0\n \n var $ = $B.args('gammavariate', 2,\n {alpha:null, beta:null}, ['alpha', 'beta'],\n arguments, {}, null, null),\n alpha = $.alpha,\n beta = $.beta,\n LOG4 = Math.log(4),\n SG_MAGICCONST = 1.0 + Math.log(4.5)\n \n if(alpha <= 0.0 || beta <= 0.0){\n throw _b_.ValueError('gammavariate: alpha and beta must be > 0.0')\n }\n \n if(alpha > 1.0){\n \n // Uses R.C.H. Cheng, \"The generation of Gamma\n // variables with non-integral shape parameters\",\n // Applied Statistics, (1977), 26, No. 1, p71-74\n \n var ainv = Math.sqrt(2.0 * alpha - 1.0),\n bbb = alpha - LOG4,\n ccc = alpha + ainv\n \n while(true){\n var u1 = _random()\n if(!((1e-7 < u1) && (u1 < .9999999))){\n continue\n }\n var u2 = 1.0 - _random(),\n v = Math.log(u1/(1.0-u1))/ainv,\n x = alpha*Math.exp(v),\n z = u1*u1*u2,\n r = bbb+ccc*v-x\n if((r + SG_MAGICCONST - 4.5*z >= 0.0) || r >= Math.log(z)){\n return x * beta\n }\n }\n }else if(alpha == 1.0){\n // expovariate(1)\n var u = _random()\n while(u <= 1e-7){u = _random()}\n return -Math.log(u) * beta\n }else{\n // alpha is between 0 and 1 (exclusive)\n \n // Uses ALGORITHM GS of Statistical Computing - Kennedy & Gentle\n \n while(true){\n var u = _random(),\n b = (Math.E + alpha)/Math.E,\n p = b*u,\n x\n if(p <= 1.0){x = Math.pow(p, (1.0/alpha))}\n else{x = -Math.log((b-p)/alpha)}\n var u1 = _random()\n if(p > 1.0){\n if(u1 <= Math.pow(x, alpha - 1.0)){\n break\n }\n }else if(u1 <= Math.exp(-x)){\n break\n }\n }\n return x * beta\n }\n },\n \n gauss:function(){\n \n /* Gaussian distribution.\n \n mu is the mean, and sigma is the standard deviation. 
This is\n slightly faster than the normalvariate() function.\n \n Not thread-safe without a lock around calls.\n \n # When x and y are two variables from [0, 1), uniformly\n # distributed, then\n #\n # cos(2*pi*x)*sqrt(-2*log(1-y))\n # sin(2*pi*x)*sqrt(-2*log(1-y))\n #\n # are two *independent* variables with normal distribution\n # (mu = 0, sigma = 1).\n # (Lambert Meertens)\n # (corrected version; bug discovered by Mike Miller, fixed by LM)\n \n # Multithreading note: When two threads call this function\n # simultaneously, it is possible that they will receive the\n # same return value. The window is very small though. To\n # avoid this, you have to use a lock around all calls. (I\n # didn't want to slow this down in the serial case by using a\n # lock here.)\n */\n \n var $ = $B.args('gauss', 2, {mu:null, sigma:null},\n ['mu', 'sigma'], arguments, {}, null, null),\n mu = $.mu,\n sigma = $.sigma\n \n var z = gauss_next\n gauss_next = null\n if(z===null){\n var x2pi = _random() * Math.PI * 2,\n g2rad = Math.sqrt(-2.0 * Math.log(1.0 - _random())),\n z = Math.cos(x2pi) * g2rad\n gauss_next = Math.sin(x2pi) * g2rad\n }\n return mu + z*sigma\n },\n \n getrandbits: function(k){\n var $ = $B.args('getrandbits', 1,\n {k:null},['k'],arguments, {}, null, null),\n k = $B.$GetInt($.k)\n // getrandbits(k) -> x. Generates a long int with k random bits.\n if(k <= 0){\n throw _b_.ValueError('number of bits must be greater than zero')\n }\n if(k != _b_.int(k)){\n throw _b_.TypeError('number of bits should be an integer')\n }\n var numbytes = (k + 7), // bits / 8 and rounded up\n x = _b_.int.$dict.from_bytes(_urandom(numbytes), 'big')\n return _b_.getattr(x, '__rshift__')(\n _b_.getattr(numbytes*8,'__sub__')(k))\n },\n \n getstate: function(){\n // Return internal state; can be passed to setstate() later.\n var $ = $B.args('getstate', 0, {}, [], arguments, {}, null, null)\n return _random.getstate()\n },\n \n normalvariate: function(mu, sigma){\n /*\n Normal distribution.\n \n mu is the mean, and sigma is the standard deviation.\n \n */\n \n // mu = mean, sigma = standard deviation\n \n // Uses Kinderman and Monahan method. Reference: Kinderman,\n // A.J. and Monahan, J.F., \"Computer generation of random\n // variables using the ratio of uniform deviates\", ACM Trans\n // Math Software, 3, (1977), pp257-260.\n \n var $=$B.args('normalvariate', 2,\n {mu:null, sigma:null}, ['mu', 'sigma'],\n arguments, {}, null, null),\n mu = $.mu,\n sigma = $.sigma\n \n while(true){\n var u1 = _random(),\n u2 = 1.0 - _random(),\n z = NV_MAGICCONST*(u1-0.5)/u2,\n zz = z*z/4.0\n if(zz <= -Math.log(u2)){break}\n }\n return mu + z*sigma\n },\n \n paretovariate: function(){\n /* Pareto distribution. alpha is the shape parameter.*/\n // Jain, pg. 495\n \n var $ = $B.args('paretovariate', 1, {alpha:null}, ['alpha'],\n arguments, {}, null, null)\n \n var u = 1 - _random()\n return 1 / Math.pow(u,1/$.alpha)\n },\n \n randint: function(a, b){\n var $ = $B.args('randint', 2,\n {a:null, b:null},\n ['a', 'b'],\n arguments, {}, null, null)\n return parseInt(_random()*($.b-$.a+1)+$.a)\n },\n \n random: _random,\n \n randrange: function(){\n var $ = $B.args('randrange', 3,\n {x:null, stop:null, step:null},\n ['x', 'stop', 'step'],\n arguments, {stop:null, step:null}, null, null)\n if($.stop===null){\n var start = 0, stop = $.x, step = 1\n }else{\n var start = $.x, stop = $.stop, \n step = $.step===null ? 
1 : $.step\n if(step==0){throw _b_.ValueError('step cannot be 0')}\n }\n if(typeof start=='number' && typeof stop == 'number' &&\n typeof step=='number'){\n return start+step*Math.floor(_random()*Math.floor((stop-start)/step))\n }else{\n var d = _b_.getattr(stop,'__sub__')(start)\n d = _b_.getattr(d, '__floordiv__')(step)\n // Force d to be a LongInt\n d = $B.LongInt(d)\n // d is a long integer with n digits ; to choose a random number\n // between 0 and d the most simple is to take a random digit\n // at each position, except the first one\n var s = d.value, _len = s.length,\n res = Math.floor(_random()*(parseInt(s.charAt(0))+(_len==1 ? 0 : 1)))+''\n var same_start = res.charAt(0)==s.charAt(0)\n for(var i=1;i<_len;i++){\n if(same_start){\n // If it's the last digit, don't allow stop as valid\n if(i==_len-1){\n res += Math.floor(_random()*parseInt(s.charAt(i)))+''\n }else{\n res += Math.floor(_random()*(parseInt(s.charAt(i))+1))+''\n same_start = res.charAt(i)==s.charAt(i)\n }\n }else{\n res += Math.floor(_random()*10)+''\n }\n }\n var offset = {__class__:$B.LongInt.$dict, value: res, \n pos: true}\n d = _b_.getattr(step, '__mul__')(offset)\n d = _b_.getattr(start, '__add__')(d)\n return _b_.int(d)\n }\n },\n \n sample: function(){\n /*\n Chooses k unique random elements from a population sequence or set.\n \n Returns a new list containing elements from the population while\n leaving the original population unchanged. The resulting list is\n in selection order so that all sub-slices will also be valid random\n samples. This allows raffle winners (the sample) to be partitioned\n into grand prize and second place winners (the subslices).\n \n Members of the population need not be hashable or unique. If the\n population contains repeats, then each occurrence is a possible\n selection in the sample.\n \n To choose a sample in a range of integers, use range as an argument.\n This is especially fast and space efficient for sampling from a\n large population: sample(range(10000000), 60)\n \n # Sampling without replacement entails tracking either potential\n # selections (the pool) in a list or previous selections in a set.\n \n # When the number of selections is small compared to the\n # population, then tracking selections is efficient, requiring\n # only a small set and an occasional reselection. For\n # a larger number of selections, the pool tracking method is\n # preferred since the list takes less space than the\n # set and it doesn't suffer from frequent reselections.'\n \n */\n var $ = $B.args('sample',2,{population:null,k:null},\n ['population','k'], arguments,{},null,null),\n population = $.population,\n k = $.k\n \n if(!_b_.hasattr(population, '__len__')){\n throw _b_.TypeError(\"Population must be a sequence or set. 
For dicts, use list(d).\")\n }\n var n = _b_.getattr(population, '__len__')()\n \n if(k<0 || k>n){\n throw _b_.ValueError(\"Sample larger than population\")\n }\n var result = [],\n setsize = 21 // size of a small set minus size of an empty list\n if(k > 5){\n setsize += Math.pow(4, Math.ceil(Math.log(k * 3, 4))) // table size for big sets\n }\n if(n <= setsize){\n // An n-length list is smaller than a k-length set\n if(Array.isArray(population)){\n var pool = population.slice()\n }else{var pool = _b_.list(population)}\n for(var i=0;istate.length){\n throw _b_.ValueError(\"too many values to unpack (expected \"+\n state.length+\")\")\n }\n if($.state[0]!=3){\n throw _b_.ValueError(\"ValueError: state with version \"+\n $.state[0]+\" passed to Random.setstate() of version 3\")\n }\n var second = _b_.list($.state[1])\n if(second.length!==state[1].length){\n throw _b_.ValueError('state vector is the wrong size')\n }\n for(var i=0;i shuffle list x in place; return None.\n \n Optional arg random is a 0-argument function returning a random\n float in [0.0, 1.0); by default, the standard random.random.\n */\n \n var $ = $B.args('shuffle',2,{x:null,random:null},\n ['x','random'],\n arguments,{random:null},null,null),\n x = $.x,\n random = $.random\n \n if(random===null){random=_random}\n \n if(Array.isArray(x)){\n for(var i=x.length-1;i>=0;i--){\n var j = Math.floor(random() * (i+1)),\n temp = x[j]\n x[j] = x[i]\n x[i] = temp\n }\n }else{\n var len = _b_.getattr(x, '__len__')(), temp,\n x_get = _b_.getattr(x, '__getitem__'),\n x_set = _b_.getattr(x, '__setitem__')\n \n for(i=len-1;i>=0;i--){\n var j = Math.floor(random() * (i+1)),\n temp = x_get(j)\n x_set(j, x_get(i))\n x_set(i, temp)\n }\n }\n },\n \n triangular: function(){\n /*\n Triangular distribution.\n \n Continuous distribution bounded by given lower and upper limits,\n and having a given mode value in-between.\n \n http://en.wikipedia.org/wiki/Triangular_distribution\n */\n var $=$B.args('triangular',3,\n {low:null, high:null, mode:null},\n ['low', 'high', 'mode'],\n arguments,{low:0, high:1, mode:null}, null, null),\n low = $.low,\n high = $.high,\n mode = $.mode\n \n var u = _random(),\n c = mode===null ? 0.5 : (mode - low) / (high - low)\n if(u > c){\n u = 1 - u\n c = 1 - c\n var temp = low\n low = high\n high = temp\n }\n return low + (high - low) * Math.pow(u * c, 0.5)\n },\n \n uniform: function(){\n var $ = $B.args('uniform',2,{a:null,b:null},['a','b'],\n arguments,{},null,null),\n a = $B.$GetInt($.a),\n b = $B.$GetInt($.b)\n \n return a + (b-a)*_random()\n },\n \n vonmisesvariate: function(mu, kappa){\n /* Circular data distribution.\n \n mu is the mean angle, expressed in radians between 0 and 2*pi, and\n kappa is the concentration parameter, which must be greater than or\n equal to zero. 
If kappa is equal to zero, this distribution reduces\n to a uniform random angle over the range 0 to 2*pi.\n \n */\n // mu: mean angle (in radians between 0 and 2*pi)\n // kappa: concentration parameter kappa (>= 0)\n // if kappa = 0 generate uniform random angle\n \n // Based upon an algorithm published in: Fisher, N.I.,\n // \"Statistical Analysis of Circular Data\", Cambridge\n // University Press, 1993.\n \n // Thanks to Magnus Kessler for a correction to the\n // implementation of step 4.\n \n var $=$B.args('vonmisesvariate', 2,\n {mu: null, kappa:null}, ['mu', 'kappa'],\n arguments, {}, null, null),\n mu = $.mu,\n kappa = $.kappa,\n TWOPI = 2*Math.PI\n \n if(kappa <= 1e-6){return TWOPI * _random()}\n \n var s = 0.5 / kappa,\n r = s + Math.sqrt(1.0 + s * s)\n \n while(true){\n var u1 = _random(),\n z = Math.cos(Math.PI * u1),\n d = z / (r + z),\n u2 = _random()\n if((u2 < 1.0 - d * d) || \n (u2 <= (1.0 - d) * Math.exp(d))){\n break\n }\n }\n var q = 1.0 / r,\n f = (q + z) / (1.0 + q * z),\n u3 = _random()\n if(u3 > 0.5){var theta = (mu + Math.acos(f)) % TWOPI}\n else{var theta = (mu - Math.acos(f)) % TWOPI}\n return theta\n },\n \n weibullvariate: function(){\n /*Weibull distribution.\n \n alpha is the scale parameter and beta is the shape parameter.\n \n */\n // Jain, pg. 499; bug fix courtesy Bill Arms\n \n var $ = $B.args('weibullvariate', 2, {alpha:null, beta:null},\n ['alpha', 'beta'], arguments, {}, null, null),\n alpha = $.alpha,\n beta = $.beta\n \n var u = 1 - _random()\n return alpha * Math.pow(-Math.log(u), 1/beta)\n },\n \n VERSION: VERSION\n }\n\n res.lognormvariate = function(){\n /*\n Log normal distribution.\n \n If you take the natural logarithm of this distribution, you'll get a\n normal distribution with mean mu and standard deviation sigma.\n mu can have any value, and sigma must be greater than zero.\n \n */\n \n return Math.exp(res.normalvariate.apply(null, arguments))\n }\n \n res.betavariate = function(){\n /* Beta distribution.\n \n Conditions on the parameters are alpha > 0 and beta > 0.\n Returned values range between 0 and 1.\n \n \n # This version due to Janne Sinkkonen, and matches all the std\n # texts (e.g., Knuth Vol 2 Ed 3 pg 134 \"the beta distribution\").\n */\n \n var $ = $B.args('betavariate', 2, {alpha:null, beta:null},\n ['alpha', 'beta'], arguments, {}, null, null),\n alpha = $.alpha,\n beta = $.beta\n \n var y = res.gammavariate(alpha, 1)\n if(y == 0){return _b_.float(0)}\n else{return y / (y + res.gammavariate(beta, 1))}\n }\n \n return res\n\n}\n\nfunction Random(){\n var obj = {__class__: Random.$dict}\n Random.$dict.__init__(obj)\n return obj\n}\nRandom.__class__ = $B.$factory\nRandom.$dict = {\n __class__: $B.$type,\n __name__: 'Random',\n $factory: Random,\n __init__: function(self){self.$r = _Random()},\n __getattribute__: function(self, attr){return self.$r[attr]}\n}\nRandom.$dict.__mro__ = [$B.builtins.object.$dict]\n\nvar $module = _Random()\n\n$module.Random = Random\n\n$module.SystemRandom = function(){\n var f = function(){return {__class__:f.$dict}}\n f.__class__ = $B.$factory\n f.$dict = {\n __class__: $B.$type,\n __name__: 'SystemRandom',\n $factory: f,\n __getattribute__: function(){\n throw $B.builtins.NotImplementedError()\n }\n }\n f.$dict.__mro__ = [$B.builtins.object.$dict]\n return f()\n}\n\nreturn $module\n\n})(__BRYTHON__)\n\n"], "xml.dom.minidom": [".py", "''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\nimport io\nimport xml.dom\n\nfrom xml.dom import EMPTY_NAMESPACE,EMPTY_PREFIX,XMLNS_NAMESPACE,domreg\nfrom xml.dom.minicompat 
import *\nfrom xml.dom.xmlbuilder import DOMImplementationLS,DocumentLS\n\n\n\n\n\n\n_nodeTypes_with_children=(xml.dom.Node.ELEMENT_NODE,\nxml.dom.Node.ENTITY_REFERENCE_NODE)\n\n\nclass Node(xml.dom.Node):\n namespaceURI=None\n parentNode=None\n ownerDocument=None\n nextSibling=None\n previousSibling=None\n \n prefix=EMPTY_PREFIX\n \n def __bool__(self):\n return True\n \n def toxml(self,encoding=None ):\n return self.toprettyxml(\"\",\"\",encoding)\n \n def toprettyxml(self,indent=\"\\t\",newl=\"\\n\",encoding=None ):\n if encoding is None :\n writer=io.StringIO()\n else :\n writer=io.TextIOWrapper(io.BytesIO(),\n encoding=encoding,\n errors=\"xmlcharrefreplace\",\n newline='\\n')\n if self.nodeType ==Node.DOCUMENT_NODE:\n \n self.writexml(writer,\"\",indent,newl,encoding)\n else :\n self.writexml(writer,\"\",indent,newl)\n if encoding is None :\n return writer.getvalue()\n else :\n return writer.detach().getvalue()\n \n def hasChildNodes(self):\n return bool(self.childNodes)\n \n def _get_childNodes(self):\n return self.childNodes\n \n def _get_firstChild(self):\n if self.childNodes:\n return self.childNodes[0]\n \n def _get_lastChild(self):\n if self.childNodes:\n return self.childNodes[-1]\n \n def insertBefore(self,newChild,refChild):\n if newChild.nodeType ==self.DOCUMENT_FRAGMENT_NODE:\n for c in tuple(newChild.childNodes):\n self.insertBefore(c,refChild)\n \n return newChild\n if newChild.nodeType not in self._child_node_types:\n raise xml.dom.HierarchyRequestErr(\n \"%s cannot be child of %s\"%(repr(newChild),repr(self)))\n if newChild.parentNode is not None :\n newChild.parentNode.removeChild(newChild)\n if refChild is None :\n self.appendChild(newChild)\n else :\n try :\n index=self.childNodes.index(refChild)\n except ValueError:\n raise xml.dom.NotFoundErr()\n if newChild.nodeType in _nodeTypes_with_children:\n _clear_id_cache(self)\n self.childNodes.insert(index,newChild)\n newChild.nextSibling=refChild\n refChild.previousSibling=newChild\n if index:\n node=self.childNodes[index -1]\n node.nextSibling=newChild\n newChild.previousSibling=node\n else :\n newChild.previousSibling=None\n newChild.parentNode=self\n return newChild\n \n def appendChild(self,node):\n if node.nodeType ==self.DOCUMENT_FRAGMENT_NODE:\n for c in tuple(node.childNodes):\n self.appendChild(c)\n \n return node\n if node.nodeType not in self._child_node_types:\n raise xml.dom.HierarchyRequestErr(\n \"%s cannot be child of %s\"%(repr(node),repr(self)))\n elif node.nodeType in _nodeTypes_with_children:\n _clear_id_cache(self)\n if node.parentNode is not None :\n node.parentNode.removeChild(node)\n _append_child(self,node)\n node.nextSibling=None\n return node\n \n def replaceChild(self,newChild,oldChild):\n if newChild.nodeType ==self.DOCUMENT_FRAGMENT_NODE:\n refChild=oldChild.nextSibling\n self.removeChild(oldChild)\n return self.insertBefore(newChild,refChild)\n if newChild.nodeType not in self._child_node_types:\n raise xml.dom.HierarchyRequestErr(\n \"%s cannot be child of %s\"%(repr(newChild),repr(self)))\n if newChild is oldChild:\n return\n if newChild.parentNode is not None :\n newChild.parentNode.removeChild(newChild)\n try :\n index=self.childNodes.index(oldChild)\n except ValueError:\n raise xml.dom.NotFoundErr()\n self.childNodes[index]=newChild\n newChild.parentNode=self\n oldChild.parentNode=None\n if (newChild.nodeType in _nodeTypes_with_children\n or oldChild.nodeType in _nodeTypes_with_children):\n _clear_id_cache(self)\n newChild.nextSibling=oldChild.nextSibling\n 
newChild.previousSibling=oldChild.previousSibling\n oldChild.nextSibling=None\n oldChild.previousSibling=None\n if newChild.previousSibling:\n newChild.previousSibling.nextSibling=newChild\n if newChild.nextSibling:\n newChild.nextSibling.previousSibling=newChild\n return oldChild\n \n def removeChild(self,oldChild):\n try :\n self.childNodes.remove(oldChild)\n except ValueError:\n raise xml.dom.NotFoundErr()\n if oldChild.nextSibling is not None :\n oldChild.nextSibling.previousSibling=oldChild.previousSibling\n if oldChild.previousSibling is not None :\n oldChild.previousSibling.nextSibling=oldChild.nextSibling\n oldChild.nextSibling=oldChild.previousSibling=None\n if oldChild.nodeType in _nodeTypes_with_children:\n _clear_id_cache(self)\n \n oldChild.parentNode=None\n return oldChild\n \n def normalize(self):\n L=[]\n for child in self.childNodes:\n if child.nodeType ==Node.TEXT_NODE:\n if not child.data:\n \n if L:\n L[-1].nextSibling=child.nextSibling\n if child.nextSibling:\n child.nextSibling.previousSibling=child.previousSibling\n child.unlink()\n elif L and L[-1].nodeType ==child.nodeType:\n \n node=L[-1]\n node.data=node.data+child.data\n node.nextSibling=child.nextSibling\n if child.nextSibling:\n child.nextSibling.previousSibling=node\n child.unlink()\n else :\n L.append(child)\n else :\n L.append(child)\n if child.nodeType ==Node.ELEMENT_NODE:\n child.normalize()\n self.childNodes[:]=L\n \n def cloneNode(self,deep):\n return _clone_node(self,deep,self.ownerDocument or self)\n \n def isSupported(self,feature,version):\n return self.ownerDocument.implementation.hasFeature(feature,version)\n \n def _get_localName(self):\n \n return None\n \n \n \n def isSameNode(self,other):\n return self is other\n \n def getInterface(self,feature):\n if self.isSupported(feature,None ):\n return self\n else :\n return None\n \n \n \n \n \n def getUserData(self,key):\n try :\n return self._user_data[key][0]\n except (AttributeError,KeyError):\n return None\n \n def setUserData(self,key,data,handler):\n old=None\n try :\n d=self._user_data\n except AttributeError:\n d={}\n self._user_data=d\n if key in d:\n old=d[key][0]\n if data is None :\n \n handler=None\n if old is not None :\n del d[key]\n else :\n d[key]=(data,handler)\n return old\n \n def _call_user_data_handler(self,operation,src,dst):\n if hasattr(self,\"_user_data\"):\n for key,(data,handler)in list(self._user_data.items()):\n if handler is not None :\n handler.handle(operation,key,data,src,dst)\n \n \n \n def unlink(self):\n self.parentNode=self.ownerDocument=None\n if self.childNodes:\n for child in self.childNodes:\n child.unlink()\n self.childNodes=NodeList()\n self.previousSibling=None\n self.nextSibling=None\n \n \n \n def __enter__(self):\n return self\n \n def __exit__(self,et,ev,tb):\n self.unlink()\n \ndefproperty(Node,\"firstChild\",doc=\"First child node, or None.\")\ndefproperty(Node,\"lastChild\",doc=\"Last child node, or None.\")\ndefproperty(Node,\"localName\",doc=\"Namespace-local name of this node.\")\n\n\ndef _append_child(self,node):\n\n childNodes=self.childNodes\n if childNodes:\n last=childNodes[-1]\n node.previousSibling=last\n last.nextSibling=node\n childNodes.append(node)\n node.parentNode=self\n \ndef _in_document(node):\n\n while node is not None :\n if node.nodeType ==Node.DOCUMENT_NODE:\n return True\n node=node.parentNode\n return False\n \ndef _write_data(writer,data):\n ''\n if data:\n data=data.replace(\"&\",\"&\").replace(\"<\",\"<\"). 
replace(\"\\\"\",\""\").replace(\">\",\">\")\n writer.write(data)\n \ndef _get_elements_by_tagName_helper(parent,name,rc):\n for node in parent.childNodes:\n if node.nodeType ==Node.ELEMENT_NODE and (name ==\"*\"or node.tagName ==name):\n rc.append(node)\n _get_elements_by_tagName_helper(node,name,rc)\n return rc\n \ndef _get_elements_by_tagName_ns_helper(parent,nsURI,localName,rc):\n for node in parent.childNodes:\n if node.nodeType ==Node.ELEMENT_NODE:\n if ((localName ==\"*\"or node.localName ==localName)and\n (nsURI ==\"*\"or node.namespaceURI ==nsURI)):\n rc.append(node)\n _get_elements_by_tagName_ns_helper(node,nsURI,localName,rc)\n return rc\n \nclass DocumentFragment(Node):\n nodeType=Node.DOCUMENT_FRAGMENT_NODE\n nodeName=\"#document-fragment\"\n nodeValue=None\n attributes=None\n parentNode=None\n _child_node_types=(Node.ELEMENT_NODE,\n Node.TEXT_NODE,\n Node.CDATA_SECTION_NODE,\n Node.ENTITY_REFERENCE_NODE,\n Node.PROCESSING_INSTRUCTION_NODE,\n Node.COMMENT_NODE,\n Node.NOTATION_NODE)\n \n def __init__(self):\n self.childNodes=NodeList()\n \n \nclass Attr(Node):\n __slots__=('_name','_value','namespaceURI',\n '_prefix','childNodes','_localName','ownerDocument','ownerElement')\n nodeType=Node.ATTRIBUTE_NODE\n attributes=None\n specified=False\n _is_id=False\n \n _child_node_types=(Node.TEXT_NODE,Node.ENTITY_REFERENCE_NODE)\n \n def __init__(self,qName,namespaceURI=EMPTY_NAMESPACE,localName=None ,\n prefix=None ):\n self.ownerElement=None\n self._name=qName\n self.namespaceURI=namespaceURI\n self._prefix=prefix\n self.childNodes=NodeList()\n \n \n self.childNodes.append(Text())\n \n \n \n def _get_localName(self):\n try :\n return self._localName\n except AttributeError:\n return self.nodeName.split(\":\",1)[-1]\n \n def _get_name(self):\n return self.name\n \n def _get_specified(self):\n return self.specified\n \n def _get_name(self):\n return self._name\n \n def _set_name(self,value):\n self._name=value\n if self.ownerElement is not None :\n _clear_id_cache(self.ownerElement)\n \n nodeName=name=property(_get_name,_set_name)\n \n def _get_value(self):\n return self._value\n \n def _set_value(self,value):\n self._value=value\n self.childNodes[0].data=value\n if self.ownerElement is not None :\n _clear_id_cache(self.ownerElement)\n self.childNodes[0].data=value\n \n nodeValue=value=property(_get_value,_set_value)\n \n def _get_prefix(self):\n return self._prefix\n \n def _set_prefix(self,prefix):\n nsuri=self.namespaceURI\n if prefix ==\"xmlns\":\n if nsuri and nsuri !=XMLNS_NAMESPACE:\n raise xml.dom.NamespaceErr(\n \"illegal use of 'xmlns' prefix for the wrong namespace\")\n self._prefix=prefix\n if prefix is None :\n newName=self.localName\n else :\n newName=\"%s:%s\"%(prefix,self.localName)\n if self.ownerElement:\n _clear_id_cache(self.ownerElement)\n self.name=newName\n \n prefix=property(_get_prefix,_set_prefix)\n \n def unlink(self):\n \n \n \n \n elem=self.ownerElement\n if elem is not None :\n del elem._attrs[self.nodeName]\n del elem._attrsNS[(self.namespaceURI,self.localName)]\n if self._is_id:\n self._is_id=False\n elem._magic_id_nodes -=1\n self.ownerDocument._magic_id_count -=1\n for child in self.childNodes:\n child.unlink()\n del self.childNodes[:]\n \n def _get_isId(self):\n if self._is_id:\n return True\n doc=self.ownerDocument\n elem=self.ownerElement\n if doc is None or elem is None :\n return False\n \n info=doc._get_elem_info(elem)\n if info is None :\n return False\n if self.namespaceURI:\n return info.isIdNS(self.namespaceURI,self.localName)\n else :\n 
return info.isId(self.nodeName)\n \n def _get_schemaType(self):\n doc=self.ownerDocument\n elem=self.ownerElement\n if doc is None or elem is None :\n return _no_type\n \n info=doc._get_elem_info(elem)\n if info is None :\n return _no_type\n if self.namespaceURI:\n return info.getAttributeTypeNS(self.namespaceURI,self.localName)\n else :\n return info.getAttributeType(self.nodeName)\n \ndefproperty(Attr,\"isId\",doc=\"True if this attribute is an ID.\")\ndefproperty(Attr,\"localName\",doc=\"Namespace-local name of this attribute.\")\ndefproperty(Attr,\"schemaType\",doc=\"Schema type for this attribute.\")\n\n\nclass NamedNodeMap(object):\n ''\n\n\n\n\n\n \n \n __slots__=('_attrs','_attrsNS','_ownerElement')\n \n def __init__(self,attrs,attrsNS,ownerElement):\n self._attrs=attrs\n self._attrsNS=attrsNS\n self._ownerElement=ownerElement\n \n def _get_length(self):\n return len(self._attrs)\n \n def item(self,index):\n try :\n return self[list(self._attrs.keys())[index]]\n except IndexError:\n return None\n \n def items(self):\n L=[]\n for node in self._attrs.values():\n L.append((node.nodeName,node.value))\n return L\n \n def itemsNS(self):\n L=[]\n for node in self._attrs.values():\n L.append(((node.namespaceURI,node.localName),node.value))\n return L\n \n def __contains__(self,key):\n if isinstance(key,str):\n return key in self._attrs\n else :\n return key in self._attrsNS\n \n def keys(self):\n return self._attrs.keys()\n \n def keysNS(self):\n return self._attrsNS.keys()\n \n def values(self):\n return self._attrs.values()\n \n def get(self,name,value=None ):\n return self._attrs.get(name,value)\n \n __len__=_get_length\n \n def _cmp(self,other):\n if self._attrs is getattr(other,\"_attrs\",None ):\n return 0\n else :\n return (id(self)>id(other))-(id(self)=0\n \n def __gt__(self,other):\n return self._cmp(other)>0\n \n def __le__(self,other):\n return self._cmp(other)<=0\n \n def __lt__(self,other):\n return self._cmp(other)<0\n \n def __ne__(self,other):\n return self._cmp(other)!=0\n \n def __getitem__(self,attname_or_tuple):\n if isinstance(attname_or_tuple,tuple):\n return self._attrsNS[attname_or_tuple]\n else :\n return self._attrs[attname_or_tuple]\n \n \n def __setitem__(self,attname,value):\n if isinstance(value,str):\n try :\n node=self._attrs[attname]\n except KeyError:\n node=Attr(attname)\n node.ownerDocument=self._ownerElement.ownerDocument\n self.setNamedItem(node)\n node.value=value\n else :\n if not isinstance(value,Attr):\n raise TypeError(\"value must be a string or Attr object\")\n node=value\n self.setNamedItem(node)\n \n def getNamedItem(self,name):\n try :\n return self._attrs[name]\n except KeyError:\n return None\n \n def getNamedItemNS(self,namespaceURI,localName):\n try :\n return self._attrsNS[(namespaceURI,localName)]\n except KeyError:\n return None\n \n def removeNamedItem(self,name):\n n=self.getNamedItem(name)\n if n is not None :\n _clear_id_cache(self._ownerElement)\n del self._attrs[n.nodeName]\n del self._attrsNS[(n.namespaceURI,n.localName)]\n if hasattr(n,'ownerElement'):\n n.ownerElement=None\n return n\n else :\n raise xml.dom.NotFoundErr()\n \n def removeNamedItemNS(self,namespaceURI,localName):\n n=self.getNamedItemNS(namespaceURI,localName)\n if n is not None :\n _clear_id_cache(self._ownerElement)\n del self._attrsNS[(n.namespaceURI,n.localName)]\n del self._attrs[n.nodeName]\n if hasattr(n,'ownerElement'):\n n.ownerElement=None\n return n\n else :\n raise xml.dom.NotFoundErr()\n \n def setNamedItem(self,node):\n if not 
isinstance(node,Attr):\n raise xml.dom.HierarchyRequestErr(\n \"%s cannot be child of %s\"%(repr(node),repr(self)))\n old=self._attrs.get(node.name)\n if old:\n old.unlink()\n self._attrs[node.name]=node\n self._attrsNS[(node.namespaceURI,node.localName)]=node\n node.ownerElement=self._ownerElement\n _clear_id_cache(node.ownerElement)\n return old\n \n def setNamedItemNS(self,node):\n return self.setNamedItem(node)\n \n def __delitem__(self,attname_or_tuple):\n node=self[attname_or_tuple]\n _clear_id_cache(node.ownerElement)\n node.unlink()\n \n def __getstate__(self):\n return self._attrs,self._attrsNS,self._ownerElement\n \n def __setstate__(self,state):\n self._attrs,self._attrsNS,self._ownerElement=state\n \ndefproperty(NamedNodeMap,\"length\",\ndoc=\"Number of nodes in the NamedNodeMap.\")\n\nAttributeList=NamedNodeMap\n\n\nclass TypeInfo(object):\n __slots__='namespace','name'\n \n def __init__(self,namespace,name):\n self.namespace=namespace\n self.name=name\n \n def __repr__(self):\n if self.namespace:\n return\"\"%(self.name,self.namespace)\n else :\n return\"\"%self.name\n \n def _get_name(self):\n return self.name\n \n def _get_namespace(self):\n return self.namespace\n \n_no_type=TypeInfo(None ,None )\n\nclass Element(Node):\n __slots__=('ownerDocument','parentNode','tagName','nodeName','prefix',\n 'namespaceURI','_localName','childNodes','_attrs','_attrsNS',\n 'nextSibling','previousSibling')\n nodeType=Node.ELEMENT_NODE\n nodeValue=None\n schemaType=_no_type\n \n _magic_id_nodes=0\n \n _child_node_types=(Node.ELEMENT_NODE,\n Node.PROCESSING_INSTRUCTION_NODE,\n Node.COMMENT_NODE,\n Node.TEXT_NODE,\n Node.CDATA_SECTION_NODE,\n Node.ENTITY_REFERENCE_NODE)\n \n def __init__(self,tagName,namespaceURI=EMPTY_NAMESPACE,prefix=None ,\n localName=None ):\n self.parentNode=None\n self.tagName=self.nodeName=tagName\n self.prefix=prefix\n self.namespaceURI=namespaceURI\n self.childNodes=NodeList()\n self.nextSibling=self.previousSibling=None\n \n \n \n \n \n \n \n \n \n self._attrs=None\n self._attrsNS=None\n \n def _ensure_attributes(self):\n if self._attrs is None :\n self._attrs={}\n self._attrsNS={}\n \n def _get_localName(self):\n try :\n return self._localName\n except AttributeError:\n return self.tagName.split(\":\",1)[-1]\n \n def _get_tagName(self):\n return self.tagName\n \n def unlink(self):\n if self._attrs is not None :\n for attr in list(self._attrs.values()):\n attr.unlink()\n self._attrs=None\n self._attrsNS=None\n Node.unlink(self)\n \n def getAttribute(self,attname):\n if self._attrs is None :\n return\"\"\n try :\n return self._attrs[attname].value\n except KeyError:\n return\"\"\n \n def getAttributeNS(self,namespaceURI,localName):\n if self._attrsNS is None :\n return\"\"\n try :\n return self._attrsNS[(namespaceURI,localName)].value\n except KeyError:\n return\"\"\n \n def setAttribute(self,attname,value):\n attr=self.getAttributeNode(attname)\n if attr is None :\n attr=Attr(attname)\n attr.value=value\n attr.ownerDocument=self.ownerDocument\n self.setAttributeNode(attr)\n elif value !=attr.value:\n attr.value=value\n if attr.isId:\n _clear_id_cache(self)\n \n def setAttributeNS(self,namespaceURI,qualifiedName,value):\n prefix,localname=_nssplit(qualifiedName)\n attr=self.getAttributeNodeNS(namespaceURI,localname)\n if attr is None :\n attr=Attr(qualifiedName,namespaceURI,localname,prefix)\n attr.value=value\n attr.ownerDocument=self.ownerDocument\n self.setAttributeNode(attr)\n else :\n if value !=attr.value:\n attr.value=value\n if attr.isId:\n 
_clear_id_cache(self)\n if attr.prefix !=prefix:\n attr.prefix=prefix\n attr.nodeName=qualifiedName\n \n def getAttributeNode(self,attrname):\n if self._attrs is None :\n return None\n return self._attrs.get(attrname)\n \n def getAttributeNodeNS(self,namespaceURI,localName):\n if self._attrsNS is None :\n return None\n return self._attrsNS.get((namespaceURI,localName))\n \n def setAttributeNode(self,attr):\n if attr.ownerElement not in (None ,self):\n raise xml.dom.InuseAttributeErr(\"attribute node already owned\")\n self._ensure_attributes()\n old1=self._attrs.get(attr.name,None )\n if old1 is not None :\n self.removeAttributeNode(old1)\n old2=self._attrsNS.get((attr.namespaceURI,attr.localName),None )\n if old2 is not None and old2 is not old1:\n self.removeAttributeNode(old2)\n _set_attribute_node(self,attr)\n \n if old1 is not attr:\n \n \n return old1\n if old2 is not attr:\n return old2\n \n setAttributeNodeNS=setAttributeNode\n \n def removeAttribute(self,name):\n if self._attrsNS is None :\n raise xml.dom.NotFoundErr()\n try :\n attr=self._attrs[name]\n except KeyError:\n raise xml.dom.NotFoundErr()\n self.removeAttributeNode(attr)\n \n def removeAttributeNS(self,namespaceURI,localName):\n if self._attrsNS is None :\n raise xml.dom.NotFoundErr()\n try :\n attr=self._attrsNS[(namespaceURI,localName)]\n except KeyError:\n raise xml.dom.NotFoundErr()\n self.removeAttributeNode(attr)\n \n def removeAttributeNode(self,node):\n if node is None :\n raise xml.dom.NotFoundErr()\n try :\n self._attrs[node.name]\n except KeyError:\n raise xml.dom.NotFoundErr()\n _clear_id_cache(self)\n node.unlink()\n \n \n node.ownerDocument=self.ownerDocument\n \n removeAttributeNodeNS=removeAttributeNode\n \n def hasAttribute(self,name):\n if self._attrs is None :\n return False\n return name in self._attrs\n \n def hasAttributeNS(self,namespaceURI,localName):\n if self._attrsNS is None :\n return False\n return (namespaceURI,localName)in self._attrsNS\n \n def getElementsByTagName(self,name):\n return _get_elements_by_tagName_helper(self,name,NodeList())\n \n def getElementsByTagNameNS(self,namespaceURI,localName):\n return _get_elements_by_tagName_ns_helper(\n self,namespaceURI,localName,NodeList())\n \n def __repr__(self):\n return\"\"%(self.tagName,id(self))\n \n def writexml(self,writer,indent=\"\",addindent=\"\",newl=\"\"):\n \n \n \n writer.write(indent+\"<\"+self.tagName)\n \n attrs=self._get_attributes()\n a_names=sorted(attrs.keys())\n \n for a_name in a_names:\n writer.write(\" %s=\\\"\"%a_name)\n _write_data(writer,attrs[a_name].value)\n writer.write(\"\\\"\")\n if self.childNodes:\n writer.write(\">\")\n if (len(self.childNodes)==1 and\n self.childNodes[0].nodeType ==Node.TEXT_NODE):\n self.childNodes[0].writexml(writer,'','','')\n else :\n writer.write(newl)\n for node in self.childNodes:\n node.writexml(writer,indent+addindent,addindent,newl)\n writer.write(indent)\n writer.write(\"%s\"%(self.tagName,newl))\n else :\n writer.write(\"/>%s\"%(newl))\n \n def _get_attributes(self):\n self._ensure_attributes()\n return NamedNodeMap(self._attrs,self._attrsNS,self)\n \n def hasAttributes(self):\n if self._attrs:\n return True\n else :\n return False\n \n \n \n def setIdAttribute(self,name):\n idAttr=self.getAttributeNode(name)\n self.setIdAttributeNode(idAttr)\n \n def setIdAttributeNS(self,namespaceURI,localName):\n idAttr=self.getAttributeNodeNS(namespaceURI,localName)\n self.setIdAttributeNode(idAttr)\n \n def setIdAttributeNode(self,idAttr):\n if idAttr is None or not 
self.isSameNode(idAttr.ownerElement):\n raise xml.dom.NotFoundErr()\n if _get_containing_entref(self)is not None :\n raise xml.dom.NoModificationAllowedErr()\n if not idAttr._is_id:\n idAttr._is_id=True\n self._magic_id_nodes +=1\n self.ownerDocument._magic_id_count +=1\n _clear_id_cache(self)\n \ndefproperty(Element,\"attributes\",\ndoc=\"NamedNodeMap of attributes on the element.\")\ndefproperty(Element,\"localName\",\ndoc=\"Namespace-local name of this element.\")\n\n\ndef _set_attribute_node(element,attr):\n _clear_id_cache(element)\n element._ensure_attributes()\n element._attrs[attr.name]=attr\n element._attrsNS[(attr.namespaceURI,attr.localName)]=attr\n \n \n \n \n attr.ownerElement=element\n \nclass Childless:\n ''\n\n \n __slots__=()\n \n attributes=None\n childNodes=EmptyNodeList()\n firstChild=None\n lastChild=None\n \n def _get_firstChild(self):\n return None\n \n def _get_lastChild(self):\n return None\n \n def appendChild(self,node):\n raise xml.dom.HierarchyRequestErr(\n self.nodeName+\" nodes cannot have children\")\n \n def hasChildNodes(self):\n return False\n \n def insertBefore(self,newChild,refChild):\n raise xml.dom.HierarchyRequestErr(\n self.nodeName+\" nodes do not have children\")\n \n def removeChild(self,oldChild):\n raise xml.dom.NotFoundErr(\n self.nodeName+\" nodes do not have children\")\n \n def normalize(self):\n \n pass\n \n def replaceChild(self,newChild,oldChild):\n raise xml.dom.HierarchyRequestErr(\n self.nodeName+\" nodes do not have children\")\n \n \nclass ProcessingInstruction(Childless,Node):\n nodeType=Node.PROCESSING_INSTRUCTION_NODE\n __slots__=('target','data')\n \n def __init__(self,target,data):\n self.target=target\n self.data=data\n \n \n def _get_nodeValue(self):\n return self.data\n def _set_nodeValue(self,value):\n self.data=data\n nodeValue=property(_get_nodeValue,_set_nodeValue)\n \n \n def _get_nodeName(self):\n return self.target\n def _set_nodeName(self,value):\n self.target=value\n nodeName=property(_get_nodeName,_set_nodeName)\n \n def writexml(self,writer,indent=\"\",addindent=\"\",newl=\"\"):\n writer.write(\"%s%s\"%(indent,self.target,self.data,newl))\n \n \nclass CharacterData(Childless,Node):\n __slots__=('_data','ownerDocument','parentNode','previousSibling','nextSibling')\n \n def __init__(self):\n self.ownerDocument=self.parentNode=None\n self.previousSibling=self.nextSibling=None\n self._data=''\n Node.__init__(self)\n \n def _get_length(self):\n return len(self.data)\n __len__=_get_length\n \n def _get_data(self):\n return self._data\n def _set_data(self,data):\n self._data=data\n \n data=nodeValue=property(_get_data,_set_data)\n \n def __repr__(self):\n data=self.data\n if len(data)>10:\n dotdotdot=\"...\"\n else :\n dotdotdot=\"\"\n return''%(\n self.__class__.__name__,data[0:10],dotdotdot)\n \n def substringData(self,offset,count):\n if offset <0:\n raise xml.dom.IndexSizeErr(\"offset cannot be negative\")\n if offset >=len(self.data):\n raise xml.dom.IndexSizeErr(\"offset cannot be beyond end of data\")\n if count <0:\n raise xml.dom.IndexSizeErr(\"count cannot be negative\")\n return self.data[offset:offset+count]\n \n def appendData(self,arg):\n self.data=self.data+arg\n \n def insertData(self,offset,arg):\n if offset <0:\n raise xml.dom.IndexSizeErr(\"offset cannot be negative\")\n if offset >=len(self.data):\n raise xml.dom.IndexSizeErr(\"offset cannot be beyond end of data\")\n if arg:\n self.data=\"%s%s%s\"%(\n self.data[:offset],arg,self.data[offset:])\n \n def deleteData(self,offset,count):\n if offset 
<0:\n raise xml.dom.IndexSizeErr(\"offset cannot be negative\")\n if offset >=len(self.data):\n raise xml.dom.IndexSizeErr(\"offset cannot be beyond end of data\")\n if count <0:\n raise xml.dom.IndexSizeErr(\"count cannot be negative\")\n if count:\n self.data=self.data[:offset]+self.data[offset+count:]\n \n def replaceData(self,offset,count,arg):\n if offset <0:\n raise xml.dom.IndexSizeErr(\"offset cannot be negative\")\n if offset >=len(self.data):\n raise xml.dom.IndexSizeErr(\"offset cannot be beyond end of data\")\n if count <0:\n raise xml.dom.IndexSizeErr(\"count cannot be negative\")\n if count:\n self.data=\"%s%s%s\"%(\n self.data[:offset],arg,self.data[offset+count:])\n \ndefproperty(CharacterData,\"length\",doc=\"Length of the string data.\")\n\n\nclass Text(CharacterData):\n __slots__=()\n \n nodeType=Node.TEXT_NODE\n nodeName=\"#text\"\n attributes=None\n \n def splitText(self,offset):\n if offset <0 or offset >len(self.data):\n raise xml.dom.IndexSizeErr(\"illegal offset value\")\n newText=self.__class__()\n newText.data=self.data[offset:]\n newText.ownerDocument=self.ownerDocument\n next=self.nextSibling\n if self.parentNode and self in self.parentNode.childNodes:\n if next is None :\n self.parentNode.appendChild(newText)\n else :\n self.parentNode.insertBefore(newText,next)\n self.data=self.data[:offset]\n return newText\n \n def writexml(self,writer,indent=\"\",addindent=\"\",newl=\"\"):\n _write_data(writer,\"%s%s%s\"%(indent,self.data,newl))\n \n \n \n def _get_wholeText(self):\n L=[self.data]\n n=self.previousSibling\n while n is not None :\n if n.nodeType in (Node.TEXT_NODE,Node.CDATA_SECTION_NODE):\n L.insert(0,n.data)\n n=n.previousSibling\n else :\n break\n n=self.nextSibling\n while n is not None :\n if n.nodeType in (Node.TEXT_NODE,Node.CDATA_SECTION_NODE):\n L.append(n.data)\n n=n.nextSibling\n else :\n break\n return''.join(L)\n \n def replaceWholeText(self,content):\n \n \n parent=self.parentNode\n n=self.previousSibling\n while n is not None :\n if n.nodeType in (Node.TEXT_NODE,Node.CDATA_SECTION_NODE):\n next=n.previousSibling\n parent.removeChild(n)\n n=next\n else :\n break\n n=self.nextSibling\n if not content:\n parent.removeChild(self)\n while n is not None :\n if n.nodeType in (Node.TEXT_NODE,Node.CDATA_SECTION_NODE):\n next=n.nextSibling\n parent.removeChild(n)\n n=next\n else :\n break\n if content:\n self.data=content\n return self\n else :\n return None\n \n def _get_isWhitespaceInElementContent(self):\n if self.data.strip():\n return False\n elem=_get_containing_element(self)\n if elem is None :\n return False\n info=self.ownerDocument._get_elem_info(elem)\n if info is None :\n return False\n else :\n return info.isElementContent()\n \ndefproperty(Text,\"isWhitespaceInElementContent\",\ndoc=\"True iff this text node contains only whitespace\"\n\" and is in element content.\")\ndefproperty(Text,\"wholeText\",\ndoc=\"The text of all logically-adjacent text nodes.\")\n\n\ndef _get_containing_element(node):\n c=node.parentNode\n while c is not None :\n if c.nodeType ==Node.ELEMENT_NODE:\n return c\n c=c.parentNode\n return None\n \ndef _get_containing_entref(node):\n c=node.parentNode\n while c is not None :\n if c.nodeType ==Node.ENTITY_REFERENCE_NODE:\n return c\n c=c.parentNode\n return None\n \n \nclass Comment(CharacterData):\n nodeType=Node.COMMENT_NODE\n nodeName=\"#comment\"\n \n def __init__(self,data):\n CharacterData.__init__(self)\n self._data=data\n \n def writexml(self,writer,indent=\"\",addindent=\"\",newl=\"\"):\n if\"--\"in 
self.data:\n raise ValueError(\"'--' is not allowed in a comment node\")\n writer.write(\"%s%s\"%(indent,self.data,newl))\n \n \nclass CDATASection(Text):\n __slots__=()\n \n nodeType=Node.CDATA_SECTION_NODE\n nodeName=\"#cdata-section\"\n \n def writexml(self,writer,indent=\"\",addindent=\"\",newl=\"\"):\n if self.data.find(\"]]>\")>=0:\n raise ValueError(\"']]>' not allowed in a CDATA section\")\n writer.write(\"\"%self.data)\n \n \nclass ReadOnlySequentialNamedNodeMap(object):\n __slots__='_seq',\n \n def __init__(self,seq=()):\n \n self._seq=seq\n \n def __len__(self):\n return len(self._seq)\n \n def _get_length(self):\n return len(self._seq)\n \n def getNamedItem(self,name):\n for n in self._seq:\n if n.nodeName ==name:\n return n\n \n def getNamedItemNS(self,namespaceURI,localName):\n for n in self._seq:\n if n.namespaceURI ==namespaceURI and n.localName ==localName:\n return n\n \n def __getitem__(self,name_or_tuple):\n if isinstance(name_or_tuple,tuple):\n node=self.getNamedItemNS(*name_or_tuple)\n else :\n node=self.getNamedItem(name_or_tuple)\n if node is None :\n raise KeyError(name_or_tuple)\n return node\n \n def item(self,index):\n if index <0:\n return None\n try :\n return self._seq[index]\n except IndexError:\n return None\n \n def removeNamedItem(self,name):\n raise xml.dom.NoModificationAllowedErr(\n \"NamedNodeMap instance is read-only\")\n \n def removeNamedItemNS(self,namespaceURI,localName):\n raise xml.dom.NoModificationAllowedErr(\n \"NamedNodeMap instance is read-only\")\n \n def setNamedItem(self,node):\n raise xml.dom.NoModificationAllowedErr(\n \"NamedNodeMap instance is read-only\")\n \n def setNamedItemNS(self,node):\n raise xml.dom.NoModificationAllowedErr(\n \"NamedNodeMap instance is read-only\")\n \n def __getstate__(self):\n return [self._seq]\n \n def __setstate__(self,state):\n self._seq=state[0]\n \ndefproperty(ReadOnlySequentialNamedNodeMap,\"length\",\ndoc=\"Number of entries in the NamedNodeMap.\")\n\n\nclass Identified:\n ''\n \n __slots__='publicId','systemId'\n \n def _identified_mixin_init(self,publicId,systemId):\n self.publicId=publicId\n self.systemId=systemId\n \n def _get_publicId(self):\n return self.publicId\n \n def _get_systemId(self):\n return self.systemId\n \nclass DocumentType(Identified,Childless,Node):\n nodeType=Node.DOCUMENT_TYPE_NODE\n nodeValue=None\n name=None\n publicId=None\n systemId=None\n internalSubset=None\n \n def __init__(self,qualifiedName):\n self.entities=ReadOnlySequentialNamedNodeMap()\n self.notations=ReadOnlySequentialNamedNodeMap()\n if qualifiedName:\n prefix,localname=_nssplit(qualifiedName)\n self.name=localname\n self.nodeName=self.name\n \n def _get_internalSubset(self):\n return self.internalSubset\n \n def cloneNode(self,deep):\n if self.ownerDocument is None :\n \n clone=DocumentType(None )\n clone.name=self.name\n clone.nodeName=self.name\n operation=xml.dom.UserDataHandler.NODE_CLONED\n if deep:\n clone.entities._seq=[]\n clone.notations._seq=[]\n for n in self.notations._seq:\n notation=Notation(n.nodeName,n.publicId,n.systemId)\n clone.notations._seq.append(notation)\n n._call_user_data_handler(operation,n,notation)\n for e in self.entities._seq:\n entity=Entity(e.nodeName,e.publicId,e.systemId,\n e.notationName)\n entity.actualEncoding=e.actualEncoding\n entity.encoding=e.encoding\n entity.version=e.version\n clone.entities._seq.append(entity)\n e._call_user_data_handler(operation,n,entity)\n self._call_user_data_handler(operation,self,clone)\n return clone\n else :\n return None\n \n def 
writexml(self,writer,indent=\"\",addindent=\"\",newl=\"\"):\n writer.write(\"\"+newl)\n \nclass Entity(Identified,Node):\n attributes=None\n nodeType=Node.ENTITY_NODE\n nodeValue=None\n \n actualEncoding=None\n encoding=None\n version=None\n \n def __init__(self,name,publicId,systemId,notation):\n self.nodeName=name\n self.notationName=notation\n self.childNodes=NodeList()\n self._identified_mixin_init(publicId,systemId)\n \n def _get_actualEncoding(self):\n return self.actualEncoding\n \n def _get_encoding(self):\n return self.encoding\n \n def _get_version(self):\n return self.version\n \n def appendChild(self,newChild):\n raise xml.dom.HierarchyRequestErr(\n \"cannot append children to an entity node\")\n \n def insertBefore(self,newChild,refChild):\n raise xml.dom.HierarchyRequestErr(\n \"cannot insert children below an entity node\")\n \n def removeChild(self,oldChild):\n raise xml.dom.HierarchyRequestErr(\n \"cannot remove children from an entity node\")\n \n def replaceChild(self,newChild,oldChild):\n raise xml.dom.HierarchyRequestErr(\n \"cannot replace children of an entity node\")\n \nclass Notation(Identified,Childless,Node):\n nodeType=Node.NOTATION_NODE\n nodeValue=None\n \n def __init__(self,name,publicId,systemId):\n self.nodeName=name\n self._identified_mixin_init(publicId,systemId)\n \n \nclass DOMImplementation(DOMImplementationLS):\n _features=[(\"core\",\"1.0\"),\n (\"core\",\"2.0\"),\n (\"core\",None ),\n (\"xml\",\"1.0\"),\n (\"xml\",\"2.0\"),\n (\"xml\",None ),\n (\"ls-load\",\"3.0\"),\n (\"ls-load\",None ),\n ]\n \n def hasFeature(self,feature,version):\n if version ==\"\":\n version=None\n return (feature.lower(),version)in self._features\n \n def createDocument(self,namespaceURI,qualifiedName,doctype):\n if doctype and doctype.parentNode is not None :\n raise xml.dom.WrongDocumentErr(\n \"doctype object owned by another DOM tree\")\n doc=self._create_document()\n \n add_root_element=not (namespaceURI is None\n and qualifiedName is None\n and doctype is None )\n \n if not qualifiedName and add_root_element:\n \n \n \n \n \n \n \n \n \n \n \n \n raise xml.dom.InvalidCharacterErr(\"Element with no name\")\n \n if add_root_element:\n prefix,localname=_nssplit(qualifiedName)\n if prefix ==\"xml\" and namespaceURI !=\"http://www.w3.org/XML/1998/namespace\":\n raise xml.dom.NamespaceErr(\"illegal use of 'xml' prefix\")\n if prefix and not namespaceURI:\n raise xml.dom.NamespaceErr(\n \"illegal use of prefix without namespaces\")\n element=doc.createElementNS(namespaceURI,qualifiedName)\n if doctype:\n doc.appendChild(doctype)\n doc.appendChild(element)\n \n if doctype:\n doctype.parentNode=doctype.ownerDocument=doc\n \n doc.doctype=doctype\n doc.implementation=self\n return doc\n \n def createDocumentType(self,qualifiedName,publicId,systemId):\n doctype=DocumentType(qualifiedName)\n doctype.publicId=publicId\n doctype.systemId=systemId\n return doctype\n \n \n \n def getInterface(self,feature):\n if self.hasFeature(feature,None ):\n return self\n else :\n return None\n \n \n def _create_document(self):\n return Document()\n \nclass ElementInfo(object):\n ''\n\n\n\n\n\n \n \n __slots__='tagName',\n \n def __init__(self,name):\n self.tagName=name\n \n def getAttributeType(self,aname):\n return _no_type\n \n def getAttributeTypeNS(self,namespaceURI,localName):\n return _no_type\n \n def isElementContent(self):\n return False\n \n def isEmpty(self):\n ''\n \n return False\n \n def isId(self,aname):\n ''\n return False\n \n def isIdNS(self,namespaceURI,localName):\n ''\n 
return False\n \n def __getstate__(self):\n return self.tagName\n \n def __setstate__(self,state):\n self.tagName=state\n \ndef _clear_id_cache(node):\n if node.nodeType ==Node.DOCUMENT_NODE:\n node._id_cache.clear()\n node._id_search_stack=None\n elif _in_document(node):\n node.ownerDocument._id_cache.clear()\n node.ownerDocument._id_search_stack=None\n \nclass Document(Node,DocumentLS):\n __slots__=('_elem_info','doctype',\n '_id_search_stack','childNodes','_id_cache')\n _child_node_types=(Node.ELEMENT_NODE,Node.PROCESSING_INSTRUCTION_NODE,\n Node.COMMENT_NODE,Node.DOCUMENT_TYPE_NODE)\n \n implementation=DOMImplementation()\n nodeType=Node.DOCUMENT_NODE\n nodeName=\"#document\"\n nodeValue=None\n attributes=None\n parentNode=None\n previousSibling=nextSibling=None\n \n \n \n \n actualEncoding=None\n encoding=None\n standalone=None\n version=None\n strictErrorChecking=False\n errorHandler=None\n documentURI=None\n \n _magic_id_count=0\n \n def __init__(self):\n self.doctype=None\n self.childNodes=NodeList()\n \n \n self._elem_info={}\n self._id_cache={}\n self._id_search_stack=None\n \n def _get_elem_info(self,element):\n if element.namespaceURI:\n key=element.namespaceURI,element.localName\n else :\n key=element.tagName\n return self._elem_info.get(key)\n \n def _get_actualEncoding(self):\n return self.actualEncoding\n \n def _get_doctype(self):\n return self.doctype\n \n def _get_documentURI(self):\n return self.documentURI\n \n def _get_encoding(self):\n return self.encoding\n \n def _get_errorHandler(self):\n return self.errorHandler\n \n def _get_standalone(self):\n return self.standalone\n \n def _get_strictErrorChecking(self):\n return self.strictErrorChecking\n \n def _get_version(self):\n return self.version\n \n def appendChild(self,node):\n if node.nodeType not in self._child_node_types:\n raise xml.dom.HierarchyRequestErr(\n \"%s cannot be child of %s\"%(repr(node),repr(self)))\n if node.parentNode is not None :\n \n \n \n node.parentNode.removeChild(node)\n \n if node.nodeType ==Node.ELEMENT_NODE and self._get_documentElement():\n raise xml.dom.HierarchyRequestErr(\n \"two document elements disallowed\")\n return Node.appendChild(self,node)\n \n def removeChild(self,oldChild):\n try :\n self.childNodes.remove(oldChild)\n except ValueError:\n raise xml.dom.NotFoundErr()\n oldChild.nextSibling=oldChild.previousSibling=None\n oldChild.parentNode=None\n if self.documentElement is oldChild:\n self.documentElement=None\n \n return oldChild\n \n def _get_documentElement(self):\n for node in self.childNodes:\n if node.nodeType ==Node.ELEMENT_NODE:\n return node\n \n def unlink(self):\n if self.doctype is not None :\n self.doctype.unlink()\n self.doctype=None\n Node.unlink(self)\n \n def cloneNode(self,deep):\n if not deep:\n return None\n clone=self.implementation.createDocument(None ,None ,None )\n clone.encoding=self.encoding\n clone.standalone=self.standalone\n clone.version=self.version\n for n in self.childNodes:\n childclone=_clone_node(n,deep,clone)\n assert childclone.ownerDocument.isSameNode(clone)\n clone.childNodes.append(childclone)\n if childclone.nodeType ==Node.DOCUMENT_NODE:\n assert clone.documentElement is None\n elif childclone.nodeType ==Node.DOCUMENT_TYPE_NODE:\n assert clone.doctype is None\n clone.doctype=childclone\n childclone.parentNode=clone\n self._call_user_data_handler(xml.dom.UserDataHandler.NODE_CLONED,\n self,clone)\n return clone\n \n def createDocumentFragment(self):\n d=DocumentFragment()\n d.ownerDocument=self\n return d\n \n def 
createElement(self,tagName):\n e=Element(tagName)\n e.ownerDocument=self\n return e\n \n def createTextNode(self,data):\n if not isinstance(data,str):\n raise TypeError(\"node contents must be a string\")\n t=Text()\n t.data=data\n t.ownerDocument=self\n return t\n \n def createCDATASection(self,data):\n if not isinstance(data,str):\n raise TypeError(\"node contents must be a string\")\n c=CDATASection()\n c.data=data\n c.ownerDocument=self\n return c\n \n def createComment(self,data):\n c=Comment(data)\n c.ownerDocument=self\n return c\n \n def createProcessingInstruction(self,target,data):\n p=ProcessingInstruction(target,data)\n p.ownerDocument=self\n return p\n \n def createAttribute(self,qName):\n a=Attr(qName)\n a.ownerDocument=self\n a.value=\"\"\n return a\n \n def createElementNS(self,namespaceURI,qualifiedName):\n prefix,localName=_nssplit(qualifiedName)\n e=Element(qualifiedName,namespaceURI,prefix)\n e.ownerDocument=self\n return e\n \n def createAttributeNS(self,namespaceURI,qualifiedName):\n prefix,localName=_nssplit(qualifiedName)\n a=Attr(qualifiedName,namespaceURI,localName,prefix)\n a.ownerDocument=self\n a.value=\"\"\n return a\n \n \n \n \n def _create_entity(self,name,publicId,systemId,notationName):\n e=Entity(name,publicId,systemId,notationName)\n e.ownerDocument=self\n return e\n \n def _create_notation(self,name,publicId,systemId):\n n=Notation(name,publicId,systemId)\n n.ownerDocument=self\n return n\n \n def getElementById(self,id):\n if id in self._id_cache:\n return self._id_cache[id]\n if not (self._elem_info or self._magic_id_count):\n return None\n \n stack=self._id_search_stack\n if stack is None :\n \n stack=[self.documentElement]\n self._id_search_stack=stack\n elif not stack:\n \n \n return None\n \n result=None\n while stack:\n node=stack.pop()\n \n stack.extend([child for child in node.childNodes\n if child.nodeType in _nodeTypes_with_children])\n \n info=self._get_elem_info(node)\n if info:\n \n \n \n for attr in node.attributes.values():\n if attr.namespaceURI:\n if info.isIdNS(attr.namespaceURI,attr.localName):\n self._id_cache[attr.value]=node\n if attr.value ==id:\n result=node\n elif not node._magic_id_nodes:\n break\n elif info.isId(attr.name):\n self._id_cache[attr.value]=node\n if attr.value ==id:\n result=node\n elif not node._magic_id_nodes:\n break\n elif attr._is_id:\n self._id_cache[attr.value]=node\n if attr.value ==id:\n result=node\n elif node._magic_id_nodes ==1:\n break\n elif node._magic_id_nodes:\n for attr in node.attributes.values():\n if attr._is_id:\n self._id_cache[attr.value]=node\n if attr.value ==id:\n result=node\n if result is not None :\n break\n return result\n \n def getElementsByTagName(self,name):\n return _get_elements_by_tagName_helper(self,name,NodeList())\n \n def getElementsByTagNameNS(self,namespaceURI,localName):\n return _get_elements_by_tagName_ns_helper(\n self,namespaceURI,localName,NodeList())\n \n def isSupported(self,feature,version):\n return self.implementation.hasFeature(feature,version)\n \n def importNode(self,node,deep):\n if node.nodeType ==Node.DOCUMENT_NODE:\n raise xml.dom.NotSupportedErr(\"cannot import document nodes\")\n elif node.nodeType ==Node.DOCUMENT_TYPE_NODE:\n raise xml.dom.NotSupportedErr(\"cannot import document type nodes\")\n return _clone_node(node,deep,self)\n \n def writexml(self,writer,indent=\"\",addindent=\"\",newl=\"\",encoding=None ):\n if encoding is None :\n writer.write(''+newl)\n else :\n writer.write('%s'%(\n encoding,newl))\n for node in self.childNodes:\n 
node.writexml(writer,indent,addindent,newl)\n \n \n \n def renameNode(self,n,namespaceURI,name):\n if n.ownerDocument is not self:\n raise xml.dom.WrongDocumentErr(\n \"cannot rename nodes from other documents;\\n\"\n \"expected %s,\\nfound %s\"%(self,n.ownerDocument))\n if n.nodeType not in (Node.ELEMENT_NODE,Node.ATTRIBUTE_NODE):\n raise xml.dom.NotSupportedErr(\n \"renameNode() only applies to element and attribute nodes\")\n if namespaceURI !=EMPTY_NAMESPACE:\n if':'in name:\n prefix,localName=name.split(':',1)\n if (prefix ==\"xmlns\"\n and namespaceURI !=xml.dom.XMLNS_NAMESPACE):\n raise xml.dom.NamespaceErr(\n \"illegal use of 'xmlns' prefix\")\n else :\n if (name ==\"xmlns\"\n and namespaceURI !=xml.dom.XMLNS_NAMESPACE\n and n.nodeType ==Node.ATTRIBUTE_NODE):\n raise xml.dom.NamespaceErr(\n \"illegal use of the 'xmlns' attribute\")\n prefix=None\n localName=name\n else :\n prefix=None\n localName=None\n if n.nodeType ==Node.ATTRIBUTE_NODE:\n element=n.ownerElement\n if element is not None :\n is_id=n._is_id\n element.removeAttributeNode(n)\n else :\n element=None\n n.prefix=prefix\n n._localName=localName\n n.namespaceURI=namespaceURI\n n.nodeName=name\n if n.nodeType ==Node.ELEMENT_NODE:\n n.tagName=name\n else :\n \n n.name=name\n if element is not None :\n element.setAttributeNode(n)\n if is_id:\n element.setIdAttributeNode(n)\n \n \n \n \n \n return n\n \ndefproperty(Document,\"documentElement\",\ndoc=\"Top-level element of this document.\")\n\n\ndef _clone_node(node,deep,newOwnerDocument):\n ''\n\n\n \n if node.ownerDocument.isSameNode(newOwnerDocument):\n operation=xml.dom.UserDataHandler.NODE_CLONED\n else :\n operation=xml.dom.UserDataHandler.NODE_IMPORTED\n if node.nodeType ==Node.ELEMENT_NODE:\n clone=newOwnerDocument.createElementNS(node.namespaceURI,\n node.nodeName)\n for attr in node.attributes.values():\n clone.setAttributeNS(attr.namespaceURI,attr.nodeName,attr.value)\n a=clone.getAttributeNodeNS(attr.namespaceURI,attr.localName)\n a.specified=attr.specified\n \n if deep:\n for child in node.childNodes:\n c=_clone_node(child,deep,newOwnerDocument)\n clone.appendChild(c)\n \n elif node.nodeType ==Node.DOCUMENT_FRAGMENT_NODE:\n clone=newOwnerDocument.createDocumentFragment()\n if deep:\n for child in node.childNodes:\n c=_clone_node(child,deep,newOwnerDocument)\n clone.appendChild(c)\n \n elif node.nodeType ==Node.TEXT_NODE:\n clone=newOwnerDocument.createTextNode(node.data)\n elif node.nodeType ==Node.CDATA_SECTION_NODE:\n clone=newOwnerDocument.createCDATASection(node.data)\n elif node.nodeType ==Node.PROCESSING_INSTRUCTION_NODE:\n clone=newOwnerDocument.createProcessingInstruction(node.target,\n node.data)\n elif node.nodeType ==Node.COMMENT_NODE:\n clone=newOwnerDocument.createComment(node.data)\n elif node.nodeType ==Node.ATTRIBUTE_NODE:\n clone=newOwnerDocument.createAttributeNS(node.namespaceURI,\n node.nodeName)\n clone.specified=True\n clone.value=node.value\n elif node.nodeType ==Node.DOCUMENT_TYPE_NODE:\n assert node.ownerDocument is not newOwnerDocument\n operation=xml.dom.UserDataHandler.NODE_IMPORTED\n clone=newOwnerDocument.implementation.createDocumentType(\n node.name,node.publicId,node.systemId)\n clone.ownerDocument=newOwnerDocument\n if deep:\n clone.entities._seq=[]\n clone.notations._seq=[]\n for n in node.notations._seq:\n notation=Notation(n.nodeName,n.publicId,n.systemId)\n notation.ownerDocument=newOwnerDocument\n clone.notations._seq.append(notation)\n if hasattr(n,'_call_user_data_handler'):\n 
n._call_user_data_handler(operation,n,notation)\n for e in node.entities._seq:\n entity=Entity(e.nodeName,e.publicId,e.systemId,\n e.notationName)\n entity.actualEncoding=e.actualEncoding\n entity.encoding=e.encoding\n entity.version=e.version\n entity.ownerDocument=newOwnerDocument\n clone.entities._seq.append(entity)\n if hasattr(e,'_call_user_data_handler'):\n e._call_user_data_handler(operation,n,entity)\n else :\n \n \n \n raise xml.dom.NotSupportedErr(\"Cannot clone node %s\"%repr(node))\n \n \n \n \n if hasattr(node,'_call_user_data_handler'):\n node._call_user_data_handler(operation,node,clone)\n return clone\n \n \ndef _nssplit(qualifiedName):\n fields=qualifiedName.split(':',1)\n if len(fields)==2:\n return fields\n else :\n return (None ,fields[0])\n \n \ndef _do_pulldom_parse(func,args,kwargs):\n events=func(*args,**kwargs)\n toktype,rootNode=events.getEvent()\n events.expandNode(rootNode)\n events.clear()\n return rootNode\n \ndef parse(file,parser=None ,bufsize=None ):\n ''\n if parser is None and not bufsize:\n from xml.dom import expatbuilder\n return expatbuilder.parse(file)\n else :\n from xml.dom import pulldom\n return _do_pulldom_parse(pulldom.parse,(file,),\n {'parser':parser,'bufsize':bufsize})\n \ndef parseString(string,parser=None ):\n ''\n if parser is None :\n from xml.dom import expatbuilder\n return expatbuilder.parseString(string)\n else :\n from xml.dom import pulldom\n return _do_pulldom_parse(pulldom.parseString,(string,),\n {'parser':parser})\n \ndef getDOMImplementation(features=None ):\n if features:\n if isinstance(features,str):\n features=domreg._parse_feature_string(features)\n for f,v in features:\n if not Document.implementation.hasFeature(f,v):\n return None\n return Document.implementation\n"], "xml.dom.domreg": [".py", "''\n\n\n\n\n\n\n\nwell_known_implementations={\n'minidom':'xml.dom.minidom',\n'4DOM':'xml.dom.DOMImplementation',\n}\n\n\n\n\nregistered={}\n\ndef registerDOMImplementation(name,factory):\n ''\n\n\n\n\n\n \n \n registered[name]=factory\n \ndef _good_enough(dom,features):\n ''\n for f,v in features:\n if not dom.hasFeature(f,v):\n return 0\n return 1\n \ndef getDOMImplementation(name=None ,features=()):\n ''\n\n\n\n\n\n\n\n\n\n \n \n import os\n creator=None\n mod=well_known_implementations.get(name)\n if mod:\n mod=__import__(mod,{},{},['getDOMImplementation'])\n return mod.getDOMImplementation()\n elif name:\n return registered[name]()\n elif\"PYTHON_DOM\"in os.environ:\n return getDOMImplementation(name=os.environ[\"PYTHON_DOM\"])\n \n \n \n if isinstance(features,str):\n features=_parse_feature_string(features)\n for creator in registered.values():\n dom=creator()\n if _good_enough(dom,features):\n return dom\n \n for creator in well_known_implementations.keys():\n try :\n dom=getDOMImplementation(name=creator)\n except Exception:\n continue\n if _good_enough(dom,features):\n return dom\n \n raise ImportError(\"no suitable DOM implementation found\")\n \ndef _parse_feature_string(s):\n features=[]\n parts=s.split()\n i=0\n length=len(parts)\n while i 0:\n if not block:\n if self._qsize()>=self.maxsize:\n raise Full\n elif timeout is None :\n while self._qsize()>=self.maxsize:\n self.not_full.wait()\n elif timeout <0:\n raise ValueError(\"'timeout' must be a non-negative number\")\n else :\n endtime=time()+timeout\n while self._qsize()>=self.maxsize:\n remaining=endtime -time()\n if remaining <=0.0:\n raise Full\n self.not_full.wait(remaining)\n self._put(item)\n self.unfinished_tasks +=1\n self.not_empty.notify()\n \n def 
get(self,block=True ,timeout=None ):\n ''\n\n\n\n\n\n\n\n\n \n with self.not_empty:\n if not block:\n if not self._qsize():\n raise Empty\n elif timeout is None :\n while not self._qsize():\n self.not_empty.wait()\n elif timeout <0:\n raise ValueError(\"'timeout' must be a non-negative number\")\n else :\n endtime=time()+timeout\n while not self._qsize():\n remaining=endtime -time()\n if remaining <=0.0:\n raise Empty\n self.not_empty.wait(remaining)\n item=self._get()\n self.not_full.notify()\n return item\n \n def put_nowait(self,item):\n ''\n\n\n\n \n return self.put(item,block=False )\n \n def get_nowait(self):\n ''\n\n\n\n \n return self.get(block=False )\n \n \n \n \n \n \n def _init(self,maxsize):\n self.queue=deque()\n \n def _qsize(self):\n return len(self.queue)\n \n \n def _put(self,item):\n self.queue.append(item)\n \n \n def _get(self):\n return self.queue.popleft()\n \n \nclass PriorityQueue(Queue):\n ''\n\n\n \n \n def _init(self,maxsize):\n self.queue=[]\n \n def _qsize(self):\n return len(self.queue)\n \n def _put(self,item):\n heappush(self.queue,item)\n \n def _get(self):\n return heappop(self.queue)\n \n \nclass LifoQueue(Queue):\n ''\n \n def _init(self,maxsize):\n self.queue=[]\n \n def _qsize(self):\n return len(self.queue)\n \n def _put(self,item):\n self.queue.append(item)\n \n def _get(self):\n return self.queue.pop()\n"], "encodings.cp1258": [".py", "''\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_table)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False ):\n return codecs.charmap_encode(input,self.errors,encoding_table)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False ):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='cp1258',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n \n \ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' 
'\n'!'\n'\"'\n'#'\n'$'\n'%'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\u20ac'\n'\\ufffe'\n'\\u201a'\n'\\u0192'\n'\\u201e'\n'\\u2026'\n'\\u2020'\n'\\u2021'\n'\\u02c6'\n'\\u2030'\n'\\ufffe'\n'\\u2039'\n'\\u0152'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\ufffe'\n'\\u2018'\n'\\u2019'\n'\\u201c'\n'\\u201d'\n'\\u2022'\n'\\u2013'\n'\\u2014'\n'\\u02dc'\n'\\u2122'\n'\\ufffe'\n'\\u203a'\n'\\u0153'\n'\\ufffe'\n'\\ufffe'\n'\\u0178'\n'\\xa0'\n'\\xa1'\n'\\xa2'\n'\\xa3'\n'\\xa4'\n'\\xa5'\n'\\xa6'\n'\\xa7'\n'\\xa8'\n'\\xa9'\n'\\xaa'\n'\\xab'\n'\\xac'\n'\\xad'\n'\\xae'\n'\\xaf'\n'\\xb0'\n'\\xb1'\n'\\xb2'\n'\\xb3'\n'\\xb4'\n'\\xb5'\n'\\xb6'\n'\\xb7'\n'\\xb8'\n'\\xb9'\n'\\xba'\n'\\xbb'\n'\\xbc'\n'\\xbd'\n'\\xbe'\n'\\xbf'\n'\\xc0'\n'\\xc1'\n'\\xc2'\n'\\u0102'\n'\\xc4'\n'\\xc5'\n'\\xc6'\n'\\xc7'\n'\\xc8'\n'\\xc9'\n'\\xca'\n'\\xcb'\n'\\u0300'\n'\\xcd'\n'\\xce'\n'\\xcf'\n'\\u0110'\n'\\xd1'\n'\\u0309'\n'\\xd3'\n'\\xd4'\n'\\u01a0'\n'\\xd6'\n'\\xd7'\n'\\xd8'\n'\\xd9'\n'\\xda'\n'\\xdb'\n'\\xdc'\n'\\u01af'\n'\\u0303'\n'\\xdf'\n'\\xe0'\n'\\xe1'\n'\\xe2'\n'\\u0103'\n'\\xe4'\n'\\xe5'\n'\\xe6'\n'\\xe7'\n'\\xe8'\n'\\xe9'\n'\\xea'\n'\\xeb'\n'\\u0301'\n'\\xed'\n'\\xee'\n'\\xef'\n'\\u0111'\n'\\xf1'\n'\\u0323'\n'\\xf3'\n'\\xf4'\n'\\u01a1'\n'\\xf6'\n'\\xf7'\n'\\xf8'\n'\\xf9'\n'\\xfa'\n'\\xfb'\n'\\xfc'\n'\\u01b0'\n'\\u20ab'\n'\\xff'\n)\n\n\nencoding_table=codecs.charmap_build(decoding_table)\n"], "linecache": [".py", "''\n\n\n\n\n\n\nimport sys\nimport os\nimport tokenize\n\n__all__=[\"getline\",\"clearcache\",\"checkcache\"]\n\ndef getline(filename,lineno,module_globals=None ):\n lines=getlines(filename,module_globals)\n if 1 <=lineno <=len(lines):\n return lines[lineno -1]\n else :\n return''\n \n \n \n \ncache={}\n\n\ndef clearcache():\n ''\n \n global cache\n cache={}\n \n \ndef getlines(filename,module_globals=None ):\n ''\n \n \n if filename in cache:\n return cache[filename][2]\n else :\n return updatecache(filename,module_globals)\n \n \ndef checkcache(filename=None ):\n ''\n \n \n if filename is None :\n filenames=list(cache.keys())\n else :\n if filename in cache:\n filenames=[filename]\n else :\n return\n \n for filename in filenames:\n size,mtime,lines,fullname=cache[filename]\n if mtime is None :\n continue\n try :\n stat=os.stat(fullname)\n except os.error:\n del cache[filename]\n continue\n if size !=stat.st_size or mtime !=stat.st_mtime:\n del cache[filename]\n \n \ndef updatecache(filename,module_globals=None ):\n ''\n\n \n \n if filename in cache:\n del cache[filename]\n if not filename or (filename.startswith('<')and filename.endswith('>')):\n return []\n \n fullname=filename\n try :\n stat=os.stat(fullname)\n except OSError:\n basename=filename\n \n \n if module_globals and'__loader__'in module_globals:\n name=module_globals.get('__name__')\n loader=module_globals['__loader__']\n get_source=getattr(loader,'get_source',None )\n \n if name and get_source:\n try :\n data=get_source(name)\n except (ImportError,IOError):\n pass\n else :\n if data is None :\n \n \n return []\n cache[filename]=(\n len(data),None ,\n [line+'\\n'for line in data.splitlines()],fullname\n )\n return cache[filename][2]\n \n \n \n 
if os.path.isabs(filename):\n return []\n \n for dirname in sys.path:\n try :\n fullname=os.path.join(dirname,basename)\n except (TypeError,AttributeError):\n \n continue\n try :\n stat=os.stat(fullname)\n break\n except os.error:\n pass\n else :\n return []\n try :\n with tokenize.open(fullname)as fp:\n lines=fp.readlines()\n except IOError:\n return []\n if lines and not lines[-1].endswith('\\n'):\n lines[-1]+='\\n'\n size,mtime=stat.st_size,stat.st_mtime\n cache[filename]=size,mtime,lines,fullname\n return lines\n"], "asyncio.streams": [".py", "''\n\n__all__=['StreamReader','StreamWriter','StreamReaderProtocol',\n'open_connection','start_server',\n'IncompleteReadError',\n]\n\nimport socket\n\nif hasattr(socket,'AF_UNIX'):\n __all__.extend(['open_unix_connection','start_unix_server'])\n \nfrom .import coroutines\nfrom .import events\nfrom .import futures\nfrom .import protocols\nfrom .coroutines import coroutine\nfrom .log import logger\n\n\n_DEFAULT_LIMIT=2 **16\n\n\nclass IncompleteReadError(EOFError):\n ''\n\n\n\n\n \n def __init__(self,partial,expected):\n EOFError.__init__(self,\"%s bytes read on a total of %s expected bytes\"\n %(len(partial),expected))\n self.partial=partial\n self.expected=expected\n \n \n@coroutine\ndef open_connection(host=None ,port=None ,*,\nloop=None ,limit=_DEFAULT_LIMIT,**kwds):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if loop is None :\n loop=events.get_event_loop()\n reader=StreamReader(limit=limit,loop=loop)\n protocol=StreamReaderProtocol(reader,loop=loop)\n transport,_=yield from loop.create_connection(\n lambda :protocol,host,port,**kwds)\n writer=StreamWriter(transport,protocol,reader,loop)\n return reader,writer\n \n \n@coroutine\ndef start_server(client_connected_cb,host=None ,port=None ,*,\nloop=None ,limit=_DEFAULT_LIMIT,**kwds):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if loop is None :\n loop=events.get_event_loop()\n \n def factory():\n reader=StreamReader(limit=limit,loop=loop)\n protocol=StreamReaderProtocol(reader,client_connected_cb,\n loop=loop)\n return protocol\n \n return (yield from loop.create_server(factory,host,port,**kwds))\n \n \nif hasattr(socket,'AF_UNIX'):\n\n\n @coroutine\n def open_unix_connection(path=None ,*,\n loop=None ,limit=_DEFAULT_LIMIT,**kwds):\n ''\n if loop is None :\n loop=events.get_event_loop()\n reader=StreamReader(limit=limit,loop=loop)\n protocol=StreamReaderProtocol(reader,loop=loop)\n transport,_=yield from loop.create_unix_connection(\n lambda :protocol,path,**kwds)\n writer=StreamWriter(transport,protocol,reader,loop)\n return reader,writer\n \n \n @coroutine\n def start_unix_server(client_connected_cb,path=None ,*,\n loop=None ,limit=_DEFAULT_LIMIT,**kwds):\n ''\n if loop is None :\n loop=events.get_event_loop()\n \n def factory():\n reader=StreamReader(limit=limit,loop=loop)\n protocol=StreamReaderProtocol(reader,client_connected_cb,\n loop=loop)\n return protocol\n \n return (yield from loop.create_unix_server(factory,path,**kwds))\n \n \nclass FlowControlMixin(protocols.Protocol):\n ''\n\n\n\n\n\n\n \n \n def __init__(self,loop=None ):\n if loop is None :\n self._loop=events.get_event_loop()\n else :\n self._loop=loop\n self._paused=False\n self._drain_waiter=None\n self._connection_lost=False\n \n def pause_writing(self):\n assert not self._paused\n self._paused=True\n if self._loop.get_debug():\n logger.debug(\"%r pauses writing\",self)\n \n def resume_writing(self):\n assert self._paused\n self._paused=False\n if self._loop.get_debug():\n logger.debug(\"%r resumes writing\",self)\n \n 
waiter=self._drain_waiter\n if waiter is not None :\n self._drain_waiter=None\n if not waiter.done():\n waiter.set_result(None )\n \n def connection_lost(self,exc):\n self._connection_lost=True\n \n if not self._paused:\n return\n waiter=self._drain_waiter\n if waiter is None :\n return\n self._drain_waiter=None\n if waiter.done():\n return\n if exc is None :\n waiter.set_result(None )\n else :\n waiter.set_exception(exc)\n \n @coroutine\n def _drain_helper(self):\n if self._connection_lost:\n raise ConnectionResetError('Connection lost')\n if not self._paused:\n return\n waiter=self._drain_waiter\n assert waiter is None or waiter.cancelled()\n waiter=futures.Future(loop=self._loop)\n self._drain_waiter=waiter\n yield from waiter\n \n \nclass StreamReaderProtocol(FlowControlMixin,protocols.Protocol):\n ''\n\n\n\n\n\n \n \n def __init__(self,stream_reader,client_connected_cb=None ,loop=None ):\n super().__init__(loop=loop)\n self._stream_reader=stream_reader\n self._stream_writer=None\n self._client_connected_cb=client_connected_cb\n \n def connection_made(self,transport):\n self._stream_reader.set_transport(transport)\n if self._client_connected_cb is not None :\n self._stream_writer=StreamWriter(transport,self,\n self._stream_reader,\n self._loop)\n res=self._client_connected_cb(self._stream_reader,\n self._stream_writer)\n if coroutines.iscoroutine(res):\n self._loop.create_task(res)\n \n def connection_lost(self,exc):\n if exc is None :\n self._stream_reader.feed_eof()\n else :\n self._stream_reader.set_exception(exc)\n super().connection_lost(exc)\n \n def data_received(self,data):\n self._stream_reader.feed_data(data)\n \n def eof_received(self):\n self._stream_reader.feed_eof()\n \n \nclass StreamWriter:\n ''\n\n\n\n\n\n\n \n \n def __init__(self,transport,protocol,reader,loop):\n self._transport=transport\n self._protocol=protocol\n \n assert reader is None or isinstance(reader,StreamReader)\n self._reader=reader\n self._loop=loop\n \n def __repr__(self):\n info=[self.__class__.__name__,'transport=%r'%self._transport]\n if self._reader is not None :\n info.append('reader=%r'%self._reader)\n return'<%s>'%' '.join(info)\n \n @property\n def transport(self):\n return self._transport\n \n def write(self,data):\n self._transport.write(data)\n \n def writelines(self,data):\n self._transport.writelines(data)\n \n def write_eof(self):\n return self._transport.write_eof()\n \n def can_write_eof(self):\n return self._transport.can_write_eof()\n \n def close(self):\n return self._transport.close()\n \n def get_extra_info(self,name,default=None ):\n return self._transport.get_extra_info(name,default)\n \n @coroutine\n def drain(self):\n ''\n\n\n\n\n\n \n if self._reader is not None :\n exc=self._reader.exception()\n if exc is not None :\n raise exc\n yield from self._protocol._drain_helper()\n \n \nclass StreamReader:\n\n def __init__(self,limit=_DEFAULT_LIMIT,loop=None ):\n \n \n self._limit=limit\n if loop is None :\n self._loop=events.get_event_loop()\n else :\n self._loop=loop\n self._buffer=bytearray()\n self._eof=False\n self._waiter=None\n self._exception=None\n self._transport=None\n self._paused=False\n \n def exception(self):\n return self._exception\n \n def set_exception(self,exc):\n self._exception=exc\n \n waiter=self._waiter\n if waiter is not None :\n self._waiter=None\n if not waiter.cancelled():\n waiter.set_exception(exc)\n \n def _wakeup_waiter(self):\n ''\n waiter=self._waiter\n if waiter is not None :\n self._waiter=None\n if not waiter.cancelled():\n 
waiter.set_result(None )\n \n def set_transport(self,transport):\n assert self._transport is None ,'Transport already set'\n self._transport=transport\n \n def _maybe_resume_transport(self):\n if self._paused and len(self._buffer)<=self._limit:\n self._paused=False\n self._transport.resume_reading()\n \n def feed_eof(self):\n self._eof=True\n self._wakeup_waiter()\n \n def at_eof(self):\n ''\n return self._eof and not self._buffer\n \n def feed_data(self,data):\n assert not self._eof,'feed_data after feed_eof'\n \n if not data:\n return\n \n self._buffer.extend(data)\n self._wakeup_waiter()\n \n if (self._transport is not None and\n not self._paused and\n len(self._buffer)>2 *self._limit):\n try :\n self._transport.pause_reading()\n except NotImplementedError:\n \n \n \n self._transport=None\n else :\n self._paused=True\n \n def _wait_for_data(self,func_name):\n ''\n \n \n \n \n if self._waiter is not None :\n raise RuntimeError('%s() called while another coroutine is '\n 'already waiting for incoming data'%func_name)\n \n self._waiter=futures.Future(loop=self._loop)\n try :\n yield from self._waiter\n finally :\n self._waiter=None\n \n @coroutine\n def readline(self):\n if self._exception is not None :\n raise self._exception\n \n line=bytearray()\n not_enough=True\n \n while not_enough:\n while self._buffer and not_enough:\n ichar=self._buffer.find(b'\\n')\n if ichar <0:\n line.extend(self._buffer)\n self._buffer.clear()\n else :\n ichar +=1\n line.extend(self._buffer[:ichar])\n del self._buffer[:ichar]\n not_enough=False\n \n if len(line)>self._limit:\n self._maybe_resume_transport()\n raise ValueError('Line is too long')\n \n if self._eof:\n break\n \n if not_enough:\n yield from self._wait_for_data('readline')\n \n self._maybe_resume_transport()\n return bytes(line)\n \n @coroutine\n def read(self,n=-1):\n if self._exception is not None :\n raise self._exception\n \n if not n:\n return b''\n \n if n <0:\n \n \n \n \n blocks=[]\n while True :\n block=yield from self.read(self._limit)\n if not block:\n break\n blocks.append(block)\n return b''.join(blocks)\n else :\n if not self._buffer and not self._eof:\n yield from self._wait_for_data('read')\n \n if n <0 or len(self._buffer)<=n:\n data=bytes(self._buffer)\n self._buffer.clear()\n else :\n \n data=bytes(self._buffer[:n])\n del self._buffer[:n]\n \n self._maybe_resume_transport()\n return data\n \n @coroutine\n def readexactly(self,n):\n if self._exception is not None :\n raise self._exception\n \n \n \n \n \n \n \n \n blocks=[]\n while n >0:\n block=yield from self.read(n)\n if not block:\n partial=b''.join(blocks)\n raise IncompleteReadError(partial,len(partial)+n)\n blocks.append(block)\n n -=len(block)\n \n return b''.join(blocks)\n"], "encodings.cp037": [".py", "''\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_table)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False ):\n return codecs.charmap_encode(input,self.errors,encoding_table)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False ):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n 
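The asyncio.streams source embedded above implements StreamReader's buffering: feed_data() appends to an internal bytearray and wakes any waiter, readline() scans the buffer for b'\n', and feed_eof() lets readers drain what is left. A minimal sketch of that behaviour, driving the reader by hand (this uses the current asyncio API with asyncio.run, which postdates the 3.4-era code bundled here; it is an illustration, not part of the bundle):

import asyncio

async def main():
    reader = asyncio.StreamReader()
    # feed_data()/feed_eof() are normally called by the transport/protocol;
    # calling them directly shows how readline() consumes the buffer.
    reader.feed_data(b"first line\nsecond ")
    reader.feed_data(b"line\n")
    reader.feed_eof()
    print(await reader.readline())   # b'first line\n'
    print(await reader.readline())   # b'second line\n'
    print(await reader.readline())   # b'' once the buffer is drained at EOF

asyncio.run(main())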
name='cp037',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n \n \ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x9c'\n'\\t'\n'\\x86'\n'\\x7f'\n'\\x97'\n'\\x8d'\n'\\x8e'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x9d'\n'\\x85'\n'\\x08'\n'\\x87'\n'\\x18'\n'\\x19'\n'\\x92'\n'\\x8f'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n'\\x80'\n'\\x81'\n'\\x82'\n'\\x83'\n'\\x84'\n'\\n'\n'\\x17'\n'\\x1b'\n'\\x88'\n'\\x89'\n'\\x8a'\n'\\x8b'\n'\\x8c'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x90'\n'\\x91'\n'\\x16'\n'\\x93'\n'\\x94'\n'\\x95'\n'\\x96'\n'\\x04'\n'\\x98'\n'\\x99'\n'\\x9a'\n'\\x9b'\n'\\x14'\n'\\x15'\n'\\x9e'\n'\\x1a'\n' '\n'\\xa0'\n'\\xe2'\n'\\xe4'\n'\\xe0'\n'\\xe1'\n'\\xe3'\n'\\xe5'\n'\\xe7'\n'\\xf1'\n'\\xa2'\n'.'\n'<'\n'('\n'+'\n'|'\n'&'\n'\\xe9'\n'\\xea'\n'\\xeb'\n'\\xe8'\n'\\xed'\n'\\xee'\n'\\xef'\n'\\xec'\n'\\xdf'\n'!'\n'$'\n'*'\n')'\n';'\n'\\xac'\n'-'\n'/'\n'\\xc2'\n'\\xc4'\n'\\xc0'\n'\\xc1'\n'\\xc3'\n'\\xc5'\n'\\xc7'\n'\\xd1'\n'\\xa6'\n','\n'%'\n'_'\n'>'\n'?'\n'\\xf8'\n'\\xc9'\n'\\xca'\n'\\xcb'\n'\\xc8'\n'\\xcd'\n'\\xce'\n'\\xcf'\n'\\xcc'\n'`'\n':'\n'#'\n'@'\n\"'\"\n'='\n'\"'\n'\\xd8'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'\\xab'\n'\\xbb'\n'\\xf0'\n'\\xfd'\n'\\xfe'\n'\\xb1'\n'\\xb0'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n'\\xaa'\n'\\xba'\n'\\xe6'\n'\\xb8'\n'\\xc6'\n'\\xa4'\n'\\xb5'\n'~'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'\\xa1'\n'\\xbf'\n'\\xd0'\n'\\xdd'\n'\\xde'\n'\\xae'\n'^'\n'\\xa3'\n'\\xa5'\n'\\xb7'\n'\\xa9'\n'\\xa7'\n'\\xb6'\n'\\xbc'\n'\\xbd'\n'\\xbe'\n'['\n']'\n'\\xaf'\n'\\xa8'\n'\\xb4'\n'\\xd7'\n'{'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'\\xad'\n'\\xf4'\n'\\xf6'\n'\\xf2'\n'\\xf3'\n'\\xf5'\n'}'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'\\xb9'\n'\\xfb'\n'\\xfc'\n'\\xf9'\n'\\xfa'\n'\\xff'\n'\\\\'\n'\\xf7'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'\\xb2'\n'\\xd4'\n'\\xd6'\n'\\xd2'\n'\\xd3'\n'\\xd5'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n'\\xb3'\n'\\xdb'\n'\\xdc'\n'\\xd9'\n'\\xda'\n'\\x9f'\n)\n\n\nencoding_table=codecs.charmap_build(decoding_table)\n"], "traceback": [".py", "import sys\nfrom browser import console\n\ndef print_exc(file=sys.stderr):\n exc=__BRYTHON__.current_exception\n if isinstance(exc,SyntaxError):\n file.write('\\n module %s line %s'%(exc.args[1],exc.args[2]))\n offset=exc.args[3]\n file.write('\\n '+exc.args[4])\n file.write('\\n '+offset *' '+'^')\n else :\n file.write(exc.info)\n file.write('\\n'+exc.__name__)\n if exc.args:\n file.write(': %s'%exc.args[0])\n file.write('\\n')\n \ndef format_exc(limit=None ,chain=True ):\n exc=__BRYTHON__.current_exception\n res=exc.info+'\\n'+exc.__name__\n if exc.args:\n res +=': '+exc.args[0]\n return res+'\\n'\n \ndef format_exception(_type,value,tb,limit=None ,chain=True ):\n return ['%s\\n'%_type,'%s\\n'%value]\n \ndef extract_tb(tb,limit=None ):\n return tb"], "asyncio.events": [".py", "''\n\n__all__=['AbstractEventLoopPolicy',\n'AbstractEventLoop','AbstractServer',\n'Handle','TimerHandle',\n'get_event_loop_policy','set_event_loop_policy',\n'get_event_loop','set_event_loop','new_event_loop',\n'get_child_watcher','set_child_watcher',\n]\n\nimport functools\nimport inspect\nimport reprlib\nimport socket\nimport subprocess\nimport sys\nimport threading\nimport traceback\n\n\n_PY34=sys.version_info >=(3,4)\n\n\ndef _get_function_source(func):\n if _PY34:\n func=inspect.unwrap(func)\n elif 
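The encodings.cp037 source above is a standard charmap codec: decoding_table is a 256-character string mapping each byte value to its character, and codecs.charmap_build() inverts it into the table used for encoding. A small sketch of the same machinery on a toy table (toy_table is illustrative, not from the bundle):

import codecs

# Toy 256-entry table: byte i decodes to chr(i), except byte 0x01 -> '€'.
toy_table = ''.join(chr(i) for i in range(256)).replace('\x01', '€')
toy_encoding = codecs.charmap_build(toy_table)        # inverse map for encoding

text, _ = codecs.charmap_decode(b'A\x01B', 'strict', toy_table)
print(text)                                           # 'A€B'
data, _ = codecs.charmap_encode('A€B', 'strict', toy_encoding)
print(data)                                           # b'A\x01B'

# The real table above is CP037 (EBCDIC); CPython ships the same codec:
print(b'\xc1'.decode('cp037'))                        # 'A'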
hasattr(func,'__wrapped__'):\n func=func.__wrapped__\n if inspect.isfunction(func):\n code=func.__code__\n return (code.co_filename,code.co_firstlineno)\n if isinstance(func,functools.partial):\n return _get_function_source(func.func)\n if _PY34 and isinstance(func,functools.partialmethod):\n return _get_function_source(func.func)\n return None\n \n \ndef _format_args(args):\n ''\n\n\n \n \n args_repr=reprlib.repr(args)\n if len(args)==1 and args_repr.endswith(',)'):\n args_repr=args_repr[:-2]+')'\n return args_repr\n \n \ndef _format_callback(func,args,suffix=''):\n if isinstance(func,functools.partial):\n if args is not None :\n suffix=_format_args(args)+suffix\n return _format_callback(func.func,func.args,suffix)\n \n func_repr=getattr(func,'__qualname__',None )\n if not func_repr:\n func_repr=repr(func)\n \n if args is not None :\n func_repr +=_format_args(args)\n if suffix:\n func_repr +=suffix\n \n source=_get_function_source(func)\n if source:\n func_repr +=' at %s:%s'%source\n return func_repr\n \n \nclass Handle:\n ''\n \n __slots__=('_callback','_args','_cancelled','_loop',\n '_source_traceback','_repr','__weakref__')\n \n def __init__(self,callback,args,loop):\n assert not isinstance(callback,Handle),'A Handle is not a callback'\n self._loop=loop\n self._callback=callback\n self._args=args\n self._cancelled=False\n self._repr=None\n if self._loop.get_debug():\n self._source_traceback=traceback.extract_stack(sys._getframe(1))\n else :\n self._source_traceback=None\n \n def _repr_info(self):\n info=[self.__class__.__name__]\n if self._cancelled:\n info.append('cancelled')\n if self._callback is not None :\n info.append(_format_callback(self._callback,self._args))\n if self._source_traceback:\n frame=self._source_traceback[-1]\n info.append('created at %s:%s'%(frame[0],frame[1]))\n return info\n \n def __repr__(self):\n if self._repr is not None :\n return self._repr\n info=self._repr_info()\n return'<%s>'%' '.join(info)\n \n def cancel(self):\n if not self._cancelled:\n self._cancelled=True\n if self._loop.get_debug():\n \n \n \n self._repr=repr(self)\n self._callback=None\n self._args=None\n \n def _run(self):\n try :\n self._callback(*self._args)\n except Exception as exc:\n cb=_format_callback(self._callback,self._args)\n msg='Exception in callback {}'.format(cb)\n context={\n 'message':msg,\n 'exception':exc,\n 'handle':self,\n }\n if self._source_traceback:\n context['source_traceback']=self._source_traceback\n self._loop.call_exception_handler(context)\n self=None\n \n \nclass TimerHandle(Handle):\n ''\n \n __slots__=['_scheduled','_when']\n \n def __init__(self,when,callback,args,loop):\n assert when is not None\n super().__init__(callback,args,loop)\n if self._source_traceback:\n del self._source_traceback[-1]\n self._when=when\n self._scheduled=False\n \n def _repr_info(self):\n info=super()._repr_info()\n pos=2 if self._cancelled else 1\n info.insert(pos,'when=%s'%self._when)\n return info\n \n def __hash__(self):\n return hash(self._when)\n \n def __lt__(self,other):\n return self._when other._when\n \n def __ge__(self,other):\n if self._when >other._when:\n return True\n return self.__eq__(other)\n \n def __eq__(self,other):\n if isinstance(other,TimerHandle):\n return (self._when ==other._when and\n self._callback ==other._callback and\n self._args ==other._args and\n self._cancelled ==other._cancelled)\n return NotImplemented\n \n def __ne__(self,other):\n equal=self.__eq__(other)\n return NotImplemented if equal is NotImplemented else not equal\n \n def 
cancel(self):\n if not self._cancelled:\n self._loop._timer_handle_cancelled(self)\n super().cancel()\n \n \nclass AbstractServer:\n ''\n \n def close(self):\n ''\n return NotImplemented\n \n def wait_closed(self):\n ''\n return NotImplemented\n \n \nclass AbstractEventLoop:\n ''\n \n \n \n def run_forever(self):\n ''\n raise NotImplementedError\n \n def run_until_complete(self,future):\n ''\n\n\n \n raise NotImplementedError\n \n def stop(self):\n ''\n\n\n\n \n raise NotImplementedError\n \n def is_running(self):\n ''\n raise NotImplementedError\n \n def is_closed(self):\n ''\n raise NotImplementedError\n \n def close(self):\n ''\n\n\n\n\n\n\n \n raise NotImplementedError\n \n \n \n def _timer_handle_cancelled(self,handle):\n ''\n raise NotImplementedError\n \n def call_soon(self,callback,*args):\n return self.call_later(0,callback,*args)\n \n def call_later(self,delay,callback,*args):\n raise NotImplementedError\n \n def call_at(self,when,callback,*args):\n raise NotImplementedError\n \n def time(self):\n raise NotImplementedError\n \n \n \n def create_task(self,coro):\n raise NotImplementedError\n \n \n \n def call_soon_threadsafe(self,callback,*args):\n raise NotImplementedError\n \n def run_in_executor(self,executor,callback,*args):\n raise NotImplementedError\n \n def set_default_executor(self,executor):\n raise NotImplementedError\n \n \n \n def getaddrinfo(self,host,port,*,family=0,type=0,proto=0,flags=0):\n raise NotImplementedError\n \n def getnameinfo(self,sockaddr,flags=0):\n raise NotImplementedError\n \n def create_connection(self,protocol_factory,host=None ,port=None ,*,\n ssl=None ,family=0,proto=0,flags=0,sock=None ,\n local_addr=None ,server_hostname=None ):\n raise NotImplementedError\n \n def create_server(self,protocol_factory,host=None ,port=None ,*,\n family=socket.AF_UNSPEC,flags=socket.AI_PASSIVE,\n sock=None ,backlog=100,ssl=None ,reuse_address=None ):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n raise NotImplementedError\n \n def create_unix_connection(self,protocol_factory,path,*,\n ssl=None ,sock=None ,\n server_hostname=None ):\n raise NotImplementedError\n \n def create_unix_server(self,protocol_factory,path,*,\n sock=None ,backlog=100,ssl=None ):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n raise NotImplementedError\n \n def create_datagram_endpoint(self,protocol_factory,\n local_addr=None ,remote_addr=None ,*,\n family=0,proto=0,flags=0):\n raise NotImplementedError\n \n \n \n def connect_read_pipe(self,protocol_factory,pipe):\n ''\n\n\n\n\n \n \n \n \n \n raise NotImplementedError\n \n def connect_write_pipe(self,protocol_factory,pipe):\n ''\n\n\n\n\n \n \n \n \n \n raise NotImplementedError\n \n def subprocess_shell(self,protocol_factory,cmd,*,stdin=subprocess.PIPE,\n stdout=subprocess.PIPE,stderr=subprocess.PIPE,\n **kwargs):\n raise NotImplementedError\n \n def subprocess_exec(self,protocol_factory,*args,stdin=subprocess.PIPE,\n stdout=subprocess.PIPE,stderr=subprocess.PIPE,\n **kwargs):\n raise NotImplementedError\n \n \n \n \n \n \n def add_reader(self,fd,callback,*args):\n raise NotImplementedError\n \n def remove_reader(self,fd):\n raise NotImplementedError\n \n def add_writer(self,fd,callback,*args):\n raise NotImplementedError\n \n def remove_writer(self,fd):\n raise NotImplementedError\n \n \n \n def sock_recv(self,sock,nbytes):\n raise NotImplementedError\n \n def sock_sendall(self,sock,data):\n raise NotImplementedError\n \n def sock_connect(self,sock,address):\n raise NotImplementedError\n \n def sock_accept(self,sock):\n raise 
NotImplementedError\n \n \n \n def add_signal_handler(self,sig,callback,*args):\n raise NotImplementedError\n \n def remove_signal_handler(self,sig):\n raise NotImplementedError\n \n \n \n def set_exception_handler(self,handler):\n raise NotImplementedError\n \n def default_exception_handler(self,context):\n raise NotImplementedError\n \n def call_exception_handler(self,context):\n raise NotImplementedError\n \n \n \n def get_debug(self):\n raise NotImplementedError\n \n def set_debug(self,enabled):\n raise NotImplementedError\n \n \nclass AbstractEventLoopPolicy:\n ''\n \n def get_event_loop(self):\n ''\n\n\n\n\n\n \n raise NotImplementedError\n \n def set_event_loop(self,loop):\n ''\n raise NotImplementedError\n \n def new_event_loop(self):\n ''\n\n \n raise NotImplementedError\n \n \n \n def get_child_watcher(self):\n ''\n raise NotImplementedError\n \n def set_child_watcher(self,watcher):\n ''\n raise NotImplementedError\n \n \nclass BaseDefaultEventLoopPolicy(AbstractEventLoopPolicy):\n ''\n\n\n\n\n\n\n\n\n\n \n \n _loop_factory=None\n \n class _Local(threading.local):\n _loop=None\n _set_called=False\n \n def __init__(self):\n self._local=self._Local()\n \n def get_event_loop(self):\n ''\n\n\n \n if (self._local._loop is None and\n not self._local._set_called and\n isinstance(threading.current_thread(),threading._MainThread)):\n self.set_event_loop(self.new_event_loop())\n if self._local._loop is None :\n raise RuntimeError('There is no current event loop in thread %r.'\n %threading.current_thread().name)\n return self._local._loop\n \n def set_event_loop(self,loop):\n ''\n self._local._set_called=True\n assert loop is None or isinstance(loop,AbstractEventLoop)\n self._local._loop=loop\n \n def new_event_loop(self):\n ''\n\n\n\n \n return self._loop_factory()\n \n \n \n \n \n \n_event_loop_policy=None\n\n\n_lock=threading.Lock()\n\n\ndef _init_event_loop_policy():\n global _event_loop_policy\n with _lock:\n if _event_loop_policy is None :\n from .import DefaultEventLoopPolicy\n _event_loop_policy=DefaultEventLoopPolicy()\n \n \ndef get_event_loop_policy():\n ''\n if _event_loop_policy is None :\n _init_event_loop_policy()\n return _event_loop_policy\n \n \ndef set_event_loop_policy(policy):\n ''\n\n \n global _event_loop_policy\n assert policy is None or isinstance(policy,AbstractEventLoopPolicy)\n _event_loop_policy=policy\n \n \ndef get_event_loop():\n ''\n return get_event_loop_policy().get_event_loop()\n \n \ndef set_event_loop(loop):\n ''\n get_event_loop_policy().set_event_loop(loop)\n \n \ndef new_event_loop():\n ''\n return get_event_loop_policy().new_event_loop()\n \n \ndef get_child_watcher():\n ''\n return get_event_loop_policy().get_child_watcher()\n \n \ndef set_child_watcher(watcher):\n ''\n \n return get_event_loop_policy().set_child_watcher(watcher)\n"], "heapq": [".py", "''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n__about__=\"\"\"Heap queues\n\n[explanation by Fran\u00e7ois Pinard]\n\nHeaps are arrays for which a[k] <= a[2*k+1] and a[k] <= a[2*k+2] for\nall k, counting elements from 0. For the sake of comparison,\nnon-existing elements are considered to be infinite. The interesting\nproperty of a heap is that a[0] is always its smallest element.\n\nThe strange invariant above is meant to be an efficient memory\nrepresentation for a tournament. 
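The asyncio.events source above routes get_event_loop()/set_event_loop()/new_event_loop() through a per-process policy object that stores one loop per thread. A short sketch of that indirection using the same public helpers (written against the current CPython API, assumed compatible with the 3.4-era code bundled here):

import asyncio

async def hello():
    await asyncio.sleep(0)
    return "hello"

# new_event_loop()/set_event_loop() delegate to the installed policy, which
# keeps the loop in thread-local storage for later get_event_loop() calls.
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
try:
    print(loop.run_until_complete(hello()))   # 'hello'
finally:
    loop.close()
    asyncio.set_event_loop(None)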
The numbers below are `k', not a[k]:\n\n 0\n\n 1 2\n\n 3 4 5 6\n\n 7 8 9 10 11 12 13 14\n\n 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30\n\n\nIn the tree above, each cell `k' is topping `2*k+1' and `2*k+2'. In\nan usual binary tournament we see in sports, each cell is the winner\nover the two cells it tops, and we can trace the winner down the tree\nto see all opponents s/he had. However, in many computer applications\nof such tournaments, we do not need to trace the history of a winner.\nTo be more memory efficient, when a winner is promoted, we try to\nreplace it by something else at a lower level, and the rule becomes\nthat a cell and the two cells it tops contain three different items,\nbut the top cell \"wins\" over the two topped cells.\n\nIf this heap invariant is protected at all time, index 0 is clearly\nthe overall winner. The simplest algorithmic way to remove it and\nfind the \"next\" winner is to move some loser (let's say cell 30 in the\ndiagram above) into the 0 position, and then percolate this new 0 down\nthe tree, exchanging values, until the invariant is re-established.\nThis is clearly logarithmic on the total number of items in the tree.\nBy iterating over all items, you get an O(n ln n) sort.\n\nA nice feature of this sort is that you can efficiently insert new\nitems while the sort is going on, provided that the inserted items are\nnot \"better\" than the last 0'th element you extracted. This is\nespecially useful in simulation contexts, where the tree holds all\nincoming events, and the \"win\" condition means the smallest scheduled\ntime. When an event schedule other events for execution, they are\nscheduled into the future, so they can easily go into the heap. So, a\nheap is a good structure for implementing schedulers (this is what I\nused for my MIDI sequencer :-).\n\nVarious structures for implementing schedulers have been extensively\nstudied, and heaps are good for this, as they are reasonably speedy,\nthe speed is almost constant, and the worst case is not much different\nthan the average case. However, there are other representations which\nare more efficient overall, yet the worst cases might be terrible.\n\nHeaps are also very useful in big disk sorts. You most probably all\nknow that a big sort implies producing \"runs\" (which are pre-sorted\nsequences, which size is usually related to the amount of CPU memory),\nfollowed by a merging passes for these runs, which merging is often\nvery cleverly organised[1]. It is very important that the initial\nsort produces the longest runs possible. Tournaments are a good way\nto that. If, using all the memory available to hold a tournament, you\nreplace and percolate items that happen to fit the current run, you'll\nproduce runs which are twice the size of the memory for random input,\nand much better for input fuzzily ordered.\n\nMoreover, if you output the 0'th item on disk and get an input which\nmay not fit in the current tournament (because the value \"wins\" over\nthe last output value), it cannot fit in the heap, so the size of the\nheap decreases. The freed memory could be cleverly reused immediately\nfor progressively building a second heap, which grows at exactly the\nsame rate the first heap is melting. When the first heap completely\nvanishes, you switch heaps and start a new run. Clever and quite\neffective!\n\nIn a word, heaps are useful memory structures to know. I use them in\na few applications, and I think it is good to keep a `heap' module\naround. 
:-)\n\n--------------------\n[1] The disk balancing algorithms which are current, nowadays, are\nmore annoying than clever, and this is a consequence of the seeking\ncapabilities of the disks. On devices which cannot seek, like big\ntape drives, the story was quite different, and one had to be very\nclever to ensure (far in advance) that each tape movement will be the\nmost effective possible (that is, will best participate at\n\"progressing\" the merge). Some tapes were even able to read\nbackwards, and this was also used to avoid the rewinding time.\nBelieve me, real good tape sorts were quite spectacular to watch!\nFrom all times, sorting has always been a Great Art! :-)\n\"\"\"\n\n__all__=['heappush','heappop','heapify','heapreplace','merge',\n'nlargest','nsmallest','heappushpop']\n\nfrom itertools import islice,count,tee,chain\n\ndef heappush(heap,item):\n ''\n heap.append(item)\n _siftdown(heap,0,len(heap)-1)\n \ndef heappop(heap):\n ''\n lastelt=heap.pop()\n if heap:\n returnitem=heap[0]\n heap[0]=lastelt\n _siftup(heap,0)\n else :\n returnitem=lastelt\n return returnitem\n \ndef heapreplace(heap,item):\n ''\n\n\n\n\n\n\n\n\n \n returnitem=heap[0]\n heap[0]=item\n _siftup(heap,0)\n return returnitem\n \ndef heappushpop(heap,item):\n ''\n if heap and heap[0]startpos:\n parentpos=(pos -1)>>1\n parent=heap[parentpos]\n if newitem startpos:\n parentpos=(pos -1)>>1\n parent=heap[parentpos]\n if parent 1:\n try :\n while True :\n v,itnum,next=s=h[0]\n yield v\n s[0]=next()\n _heapreplace(h,s)\n except _StopIteration:\n _heappop(h)\n if h:\n \n v,itnum,next=h[0]\n yield v\n yield from next.__self__\n \n \n_nsmallest=nsmallest\ndef nsmallest(n,iterable,key=None ):\n ''\n\n\n \n \n if n ==1:\n it=iter(iterable)\n head=list(islice(it,1))\n if not head:\n return []\n if key is None :\n return [min(chain(head,it))]\n return [min(chain(head,it),key=key)]\n \n \n try :\n size=len(iterable)\n except (TypeError,AttributeError):\n pass\n else :\n if n >=size:\n return sorted(iterable,key=key)[:n]\n \n \n if key is None :\n it=zip(iterable,count())\n result=_nsmallest(n,it)\n return [r[0]for r in result]\n \n \n in1,in2=tee(iterable)\n it=zip(map(key,in1),count(),in2)\n result=_nsmallest(n,it)\n return [r[2]for r in result]\n \n_nlargest=nlargest\ndef nlargest(n,iterable,key=None ):\n ''\n\n\n \n \n \n if n ==1:\n it=iter(iterable)\n head=list(islice(it,1))\n if not head:\n return []\n if key is None :\n return [max(chain(head,it))]\n return [max(chain(head,it),key=key)]\n \n \n try :\n size=len(iterable)\n except (TypeError,AttributeError):\n pass\n else :\n if n >=size:\n return sorted(iterable,key=key,reverse=True )[:n]\n \n \n if key is None :\n it=zip(iterable,count(0,-1))\n result=_nlargest(n,it)\n return [r[0]for r in result]\n \n \n in1,in2=tee(iterable)\n it=zip(map(key,in1),count(0,-1),in2)\n result=_nlargest(n,it)\n return [r[2]for r in result]\n \nif __name__ ==\"__main__\":\n\n heap=[]\n data=[1,3,5,7,9,2,4,6,8,0]\n for item in data:\n heappush(heap,item)\n sort=[]\n while heap:\n sort.append(heappop(heap))\n print(sort)\n \n import doctest\n doctest.testmod()\n"], "encodings.mac_cyrillic": [".py", "''\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_table)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False ):\n return 
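The heapq docstring above describes the invariant heap[k] <= heap[2*k+1] and heap[k] <= heap[2*k+2]: heappush() appends the new item as a leaf and sifts it toward the root until its parent is no larger. Here is a reconstruction of that sift step as an illustrative sketch (the same shape as the pure-Python helpers, not a verbatim copy of the bundled code):

def siftdown(heap, startpos, pos):
    # Move heap[pos] toward the root until its parent is no larger than it.
    newitem = heap[pos]
    while pos > startpos:
        parentpos = (pos - 1) >> 1
        parent = heap[parentpos]
        if newitem < parent:
            heap[pos] = parent      # pull the larger parent down one level
            pos = parentpos
            continue
        break
    heap[pos] = newitem

def push(heap, item):
    heap.append(item)
    siftdown(heap, 0, len(heap) - 1)

heap = []
for x in [5, 1, 4, 2, 3]:
    push(heap, x)
print(heap[0])                      # 1 -- the smallest element sits at index 0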
codecs.charmap_encode(input,self.errors,encoding_table)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False ):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='mac-cyrillic',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n \n \ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' '\n'!'\n'\"'\n'#'\n'$'\n'%'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\u0410'\n'\\u0411'\n'\\u0412'\n'\\u0413'\n'\\u0414'\n'\\u0415'\n'\\u0416'\n'\\u0417'\n'\\u0418'\n'\\u0419'\n'\\u041a'\n'\\u041b'\n'\\u041c'\n'\\u041d'\n'\\u041e'\n'\\u041f'\n'\\u0420'\n'\\u0421'\n'\\u0422'\n'\\u0423'\n'\\u0424'\n'\\u0425'\n'\\u0426'\n'\\u0427'\n'\\u0428'\n'\\u0429'\n'\\u042a'\n'\\u042b'\n'\\u042c'\n'\\u042d'\n'\\u042e'\n'\\u042f'\n'\\u2020'\n'\\xb0'\n'\\u0490'\n'\\xa3'\n'\\xa7'\n'\\u2022'\n'\\xb6'\n'\\u0406'\n'\\xae'\n'\\xa9'\n'\\u2122'\n'\\u0402'\n'\\u0452'\n'\\u2260'\n'\\u0403'\n'\\u0453'\n'\\u221e'\n'\\xb1'\n'\\u2264'\n'\\u2265'\n'\\u0456'\n'\\xb5'\n'\\u0491'\n'\\u0408'\n'\\u0404'\n'\\u0454'\n'\\u0407'\n'\\u0457'\n'\\u0409'\n'\\u0459'\n'\\u040a'\n'\\u045a'\n'\\u0458'\n'\\u0405'\n'\\xac'\n'\\u221a'\n'\\u0192'\n'\\u2248'\n'\\u2206'\n'\\xab'\n'\\xbb'\n'\\u2026'\n'\\xa0'\n'\\u040b'\n'\\u045b'\n'\\u040c'\n'\\u045c'\n'\\u0455'\n'\\u2013'\n'\\u2014'\n'\\u201c'\n'\\u201d'\n'\\u2018'\n'\\u2019'\n'\\xf7'\n'\\u201e'\n'\\u040e'\n'\\u045e'\n'\\u040f'\n'\\u045f'\n'\\u2116'\n'\\u0401'\n'\\u0451'\n'\\u044f'\n'\\u0430'\n'\\u0431'\n'\\u0432'\n'\\u0433'\n'\\u0434'\n'\\u0435'\n'\\u0436'\n'\\u0437'\n'\\u0438'\n'\\u0439'\n'\\u043a'\n'\\u043b'\n'\\u043c'\n'\\u043d'\n'\\u043e'\n'\\u043f'\n'\\u0440'\n'\\u0441'\n'\\u0442'\n'\\u0443'\n'\\u0444'\n'\\u0445'\n'\\u0446'\n'\\u0447'\n'\\u0448'\n'\\u0449'\n'\\u044a'\n'\\u044b'\n'\\u044c'\n'\\u044d'\n'\\u044e'\n'\\u20ac'\n)\n\n\nencoding_table=codecs.charmap_build(decoding_table)\n"], "binascii": [".py", "''\n\n\n\n\n\n\n\nclass Error(Exception):\n pass\n \nclass Done(Exception):\n pass\n \nclass Incomplete(Exception):\n pass\n \ndef a2b_uu(s):\n if not s:\n return''\n \n length=(ord(s[0])-0x20)%64\n \n def quadruplets_gen(s):\n while s:\n try :\n yield ord(s[0]),ord(s[1]),ord(s[2]),ord(s[3])\n except IndexError:\n s +=' '\n yield ord(s[0]),ord(s[1]),ord(s[2]),ord(s[3])\n return\n s=s[4:]\n \n try :\n result=[''.join(\n [chr((A -0x20)<<2 |(((B -0x20)>>4)&0x3)),\n chr(((B -0x20)&0xf)<<4 |(((C -0x20)>>2)&0xf)),\n chr(((C -0x20)&0x3)<<6 |((D -0x20)&0x3f))\n ])for A,B,C,D in quadruplets_gen(s[1:].rstrip())]\n except ValueError:\n raise Error('Illegal char')\n 
result=''.join(result)\n trailingdata=result[length:]\n if trailingdata.strip('\\x00'):\n raise Error('Trailing garbage')\n result=result[:length]\n if len(result)45:\n raise Error('At most 45 bytes at once')\n \n def triples_gen(s):\n while s:\n try :\n yield ord(s[0]),ord(s[1]),ord(s[2])\n except IndexError:\n s +='\\0\\0'\n yield ord(s[0]),ord(s[1]),ord(s[2])\n return\n s=s[3:]\n \n result=[''.join(\n [chr(0x20+((A >>2)&0x3F)),\n chr(0x20+(((A <<4)|((B >>4)&0xF))&0x3F)),\n chr(0x20+(((B <<2)|((C >>6)&0x3))&0x3F)),\n chr(0x20+((C)&0x3F))])\n for A,B,C in triples_gen(s)]\n return chr(ord(' ')+(length&0o77))+''.join(result)+'\\n'\n \n \ntable_a2b_base64={\n'A':0,\n'B':1,\n'C':2,\n'D':3,\n'E':4,\n'F':5,\n'G':6,\n'H':7,\n'I':8,\n'J':9,\n'K':10,\n'L':11,\n'M':12,\n'N':13,\n'O':14,\n'P':15,\n'Q':16,\n'R':17,\n'S':18,\n'T':19,\n'U':20,\n'V':21,\n'W':22,\n'X':23,\n'Y':24,\n'Z':25,\n'a':26,\n'b':27,\n'c':28,\n'd':29,\n'e':30,\n'f':31,\n'g':32,\n'h':33,\n'i':34,\n'j':35,\n'k':36,\n'l':37,\n'm':38,\n'n':39,\n'o':40,\n'p':41,\n'q':42,\n'r':43,\n's':44,\n't':45,\n'u':46,\n'v':47,\n'w':48,\n'x':49,\n'y':50,\n'z':51,\n'0':52,\n'1':53,\n'2':54,\n'3':55,\n'4':56,\n'5':57,\n'6':58,\n'7':59,\n'8':60,\n'9':61,\n'+':62,\n'/':63,\n'=':0,\n}\n\n\ndef a2b_base64(s):\n if not isinstance(s,(str,bytes)):\n raise TypeError(\"expected string, got %r\"%(s,))\n s=s.rstrip()\n \n \n \n def next_valid_char(s,pos):\n for i in range(pos+1,len(s)):\n c=s[i]\n if c <0x7f:\n try :\n table_a2b_base64[chr(c)]\n return chr(c)\n except KeyError:\n pass\n return None\n \n quad_pos=0\n leftbits=0\n leftchar=0\n res=[]\n for i,c in enumerate(s):\n if isinstance(c,int):\n c=chr(c)\n if c >'\\x7f'or c =='\\n'or c =='\\r'or c ==' ':\n continue\n if c =='=':\n if quad_pos <2 or (quad_pos ==2 and next_valid_char(s,i)!='='):\n continue\n else :\n leftbits=0\n break\n try :\n next_c=table_a2b_base64[c]\n except KeyError:\n continue\n quad_pos=(quad_pos+1)&0x03\n leftchar=(leftchar <<6)|next_c\n leftbits +=6\n if leftbits >=8:\n leftbits -=8\n res.append((leftchar >>leftbits&0xff))\n leftchar &=((1 <>2)&0x3F],\n table_b2a_base64[((A <<4)|((B >>4)&0xF))&0x3F],\n table_b2a_base64[((B <<2)|((C >>6)&0x3))&0x3F],\n table_b2a_base64[(C)&0x3F]])\n for A,B,C in a]\n \n final=s[length -final_length:]\n if final_length ==0:\n snippet=''\n elif final_length ==1:\n a=final[0]\n snippet=table_b2a_base64[(a >>2)&0x3F]+ table_b2a_base64[(a <<4)&0x3F]+'=='\n else :\n a=final[0]\n b=final[1]\n snippet=table_b2a_base64[(a >>2)&0x3F]+ table_b2a_base64[((a <<4)|(b >>4)&0xF)&0x3F]+ table_b2a_base64[(b <<2)&0x3F]+'='\n return bytes(''.join(result)+snippet+'\\n',__BRYTHON__.charset)\n \ndef a2b_qp(s,header=False ):\n inp=0\n odata=[]\n while inp =len(s):\n break\n \n if (s[inp]=='\\n')or (s[inp]=='\\r'):\n if s[inp]!='\\n':\n while inp 0 and data[lf -1]=='\\r'\n \n inp=0\n linelen=0\n odata=[]\n while inp '~'or\n c =='='or\n (header and c =='_')or\n (c =='.'and linelen ==0 and (inp+1 ==len(data)or\n data[inp+1]=='\\n'or\n data[inp+1]=='\\r'))or\n (not istext and (c =='\\r'or c =='\\n'))or\n ((c =='\\t'or c ==' ')and (inp+1 ==len(data)))or\n (c <=' 'and c !='\\r'and c !='\\n'and\n (quotetabs or (not quotetabs and (c !='\\t'and c !=' '))))):\n linelen +=3\n if linelen >=MAXLINESIZE:\n odata.append('=')\n if crlf:odata.append('\\r')\n odata.append('\\n')\n linelen=3\n odata.append('='+two_hex_digits(ord(c)))\n inp +=1\n else :\n if (istext and\n (c =='\\n'or (inp+1 0 and\n (odata[-1]==' 'or odata[-1]=='\\t')):\n ch=ord(odata[-1])\n odata[-1]='='\n 
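The a2b_base64 routine above maps each input character through table_a2b_base64 and shifts six bits at a time into an accumulator, emitting one byte whenever eight bits are available. A minimal sketch of that accumulator loop (illustrative only; base64.b64decode is the practical entry point):

import base64

ALPHABET = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/"
VALUE = {c: i for i, c in enumerate(ALPHABET)}

def b64decode_sketch(s):
    acc = bits = 0
    out = bytearray()
    for ch in s:
        if ch in "=\r\n ":
            continue                      # padding and whitespace are skipped
        acc = (acc << 6) | VALUE[ch]      # shift in the next 6-bit group
        bits += 6
        if bits >= 8:                     # a full byte is available
            bits -= 8
            out.append((acc >> bits) & 0xFF)
    return bytes(out)

assert b64decode_sketch("aGVsbG8=") == base64.b64decode("aGVsbG8=") == b"hello"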
odata.append(two_hex_digits(ch))\n \n if crlf:odata.append('\\r')\n odata.append('\\n')\n if c =='\\r':\n inp +=2\n else :\n inp +=1\n else :\n if (inp+1 =MAXLINESIZE):\n odata.append('=')\n if crlf:odata.append('\\r')\n odata.append('\\n')\n linelen=0\n \n linelen +=1\n if header and c ==' ':\n c='_'\n odata.append(c)\n inp +=1\n return''.join(odata)\n \nhex_numbers='0123456789ABCDEF'\ndef hex(n):\n if n ==0:\n return'0'\n \n if n <0:\n n=-n\n sign='-'\n else :\n sign=''\n arr=[]\n \n def hex_gen(n):\n ''\n while n:\n yield n %0x10\n n=n /0x10\n \n for nibble in hex_gen(n):\n arr=[hex_numbers[nibble]]+arr\n return sign+''.join(arr)\n \ndef two_hex_digits(n):\n return hex_numbers[n /0x10]+hex_numbers[n %0x10]\n \n \ndef strhex_to_int(s):\n i=0\n for c in s:\n i=i *0x10+hex_numbers.index(c)\n return i\n \nhqx_encoding='!\"#$%&\\'()*+,-012345689@ABCDEFGHIJKLMNPQRSTUVXYZ[`abcdefhijklmpqr'\n\nDONE=0x7f\nSKIP=0x7e\nFAIL=0x7d\n\ntable_a2b_hqx=[\n\nFAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,\n\nFAIL,FAIL,SKIP,FAIL,FAIL,SKIP,FAIL,FAIL,\n\nFAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,\n\nFAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,\n\nFAIL,0x00,0x01,0x02,0x03,0x04,0x05,0x06,\n\n0x07,0x08,0x09,0x0A,0x0B,0x0C,FAIL,FAIL,\n\n0x0D,0x0E,0x0F,0x10,0x11,0x12,0x13,FAIL,\n\n0x14,0x15,DONE,FAIL,FAIL,FAIL,FAIL,FAIL,\n\n0x16,0x17,0x18,0x19,0x1A,0x1B,0x1C,0x1D,\n\n0x1E,0x1F,0x20,0x21,0x22,0x23,0x24,FAIL,\n\n0x25,0x26,0x27,0x28,0x29,0x2A,0x2B,FAIL,\n\n0x2C,0x2D,0x2E,0x2F,FAIL,FAIL,FAIL,FAIL,\n\n0x30,0x31,0x32,0x33,0x34,0x35,0x36,FAIL,\n\n0x37,0x38,0x39,0x3A,0x3B,0x3C,FAIL,FAIL,\n\n0x3D,0x3E,0x3F,FAIL,FAIL,FAIL,FAIL,FAIL,\n\nFAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,\nFAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,\nFAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,\nFAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,\nFAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,\nFAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,\nFAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,\nFAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,\nFAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,\nFAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,\nFAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,\nFAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,\nFAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,\nFAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,\nFAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,\nFAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,\nFAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,\n]\n\ndef a2b_hqx(s):\n result=[]\n \n def quadruples_gen(s):\n t=[]\n for c in s:\n res=table_a2b_hqx[ord(c)]\n if res ==SKIP:\n continue\n elif res ==FAIL:\n raise Error('Illegal character')\n elif res ==DONE:\n yield t\n raise Done\n else :\n t.append(res)\n if len(t)==4:\n yield t\n t=[]\n yield t\n \n done=0\n try :\n for snippet in quadruples_gen(s):\n length=len(snippet)\n if length ==4:\n result.append(chr(((snippet[0]&0x3f)<<2)|(snippet[1]>>4)))\n result.append(chr(((snippet[1]&0x0f)<<4)|(snippet[2]>>2)))\n result.append(chr(((snippet[2]&0x03)<<6)|(snippet[3])))\n elif length ==3:\n result.append(chr(((snippet[0]&0x3f)<<2)|(snippet[1]>>4)))\n result.append(chr(((snippet[1]&0x0f)<<4)|(snippet[2]>>2)))\n elif length ==2:\n result.append(chr(((snippet[0]&0x3f)<<2)|(snippet[1]>>4)))\n except Done:\n done=1\n except Error:\n raise\n return (''.join(result),done)\n \n \n \ndef b2a_hqx(s):\n result=[]\n \n def triples_gen(s):\n while s:\n try :\n yield ord(s[0]),ord(s[1]),ord(s[2])\n except IndexError:\n yield tuple([ord(c)for c in s])\n s=s[3:]\n \n for snippet in triples_gen(s):\n length=len(snippet)\n if length ==3:\n result.append(\n hqx_encoding[(snippet[0]&0xfc)>>2])\n result.append(hqx_encoding[\n 
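The b2a_qp/a2b_qp pair above implements quoted-printable: bytes outside printable ASCII are written as '=' followed by two hex digits, and long lines get soft breaks. The stdlib quopri module exposes the same transformation; a quick round-trip check (illustrative usage, unrelated to the Brython bundle itself):

import quopri

raw = "café\n".encode("utf-8")            # b'caf\xc3\xa9\n'
encoded = quopri.encodestring(raw)        # the two non-ASCII bytes become =C3=A9
print(encoded)
assert quopri.decodestring(encoded) == raw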
((snippet[0]&0x03)<<4)|((snippet[1]&0xf0)>>4)])\n result.append(hqx_encoding[\n (snippet[1]&0x0f)<<2 |((snippet[2]&0xc0)>>6)])\n result.append(hqx_encoding[snippet[2]&0x3f])\n elif length ==2:\n result.append(\n hqx_encoding[(snippet[0]&0xfc)>>2])\n result.append(hqx_encoding[\n ((snippet[0]&0x03)<<4)|((snippet[1]&0xf0)>>4)])\n result.append(hqx_encoding[\n (snippet[1]&0x0f)<<2])\n elif length ==1:\n result.append(\n hqx_encoding[(snippet[0]&0xfc)>>2])\n result.append(hqx_encoding[\n ((snippet[0]&0x03)<<4)])\n return''.join(result)\n \ncrctab_hqx=[\n0x0000,0x1021,0x2042,0x3063,0x4084,0x50a5,0x60c6,0x70e7,\n0x8108,0x9129,0xa14a,0xb16b,0xc18c,0xd1ad,0xe1ce,0xf1ef,\n0x1231,0x0210,0x3273,0x2252,0x52b5,0x4294,0x72f7,0x62d6,\n0x9339,0x8318,0xb37b,0xa35a,0xd3bd,0xc39c,0xf3ff,0xe3de,\n0x2462,0x3443,0x0420,0x1401,0x64e6,0x74c7,0x44a4,0x5485,\n0xa56a,0xb54b,0x8528,0x9509,0xe5ee,0xf5cf,0xc5ac,0xd58d,\n0x3653,0x2672,0x1611,0x0630,0x76d7,0x66f6,0x5695,0x46b4,\n0xb75b,0xa77a,0x9719,0x8738,0xf7df,0xe7fe,0xd79d,0xc7bc,\n0x48c4,0x58e5,0x6886,0x78a7,0x0840,0x1861,0x2802,0x3823,\n0xc9cc,0xd9ed,0xe98e,0xf9af,0x8948,0x9969,0xa90a,0xb92b,\n0x5af5,0x4ad4,0x7ab7,0x6a96,0x1a71,0x0a50,0x3a33,0x2a12,\n0xdbfd,0xcbdc,0xfbbf,0xeb9e,0x9b79,0x8b58,0xbb3b,0xab1a,\n0x6ca6,0x7c87,0x4ce4,0x5cc5,0x2c22,0x3c03,0x0c60,0x1c41,\n0xedae,0xfd8f,0xcdec,0xddcd,0xad2a,0xbd0b,0x8d68,0x9d49,\n0x7e97,0x6eb6,0x5ed5,0x4ef4,0x3e13,0x2e32,0x1e51,0x0e70,\n0xff9f,0xefbe,0xdfdd,0xcffc,0xbf1b,0xaf3a,0x9f59,0x8f78,\n0x9188,0x81a9,0xb1ca,0xa1eb,0xd10c,0xc12d,0xf14e,0xe16f,\n0x1080,0x00a1,0x30c2,0x20e3,0x5004,0x4025,0x7046,0x6067,\n0x83b9,0x9398,0xa3fb,0xb3da,0xc33d,0xd31c,0xe37f,0xf35e,\n0x02b1,0x1290,0x22f3,0x32d2,0x4235,0x5214,0x6277,0x7256,\n0xb5ea,0xa5cb,0x95a8,0x8589,0xf56e,0xe54f,0xd52c,0xc50d,\n0x34e2,0x24c3,0x14a0,0x0481,0x7466,0x6447,0x5424,0x4405,\n0xa7db,0xb7fa,0x8799,0x97b8,0xe75f,0xf77e,0xc71d,0xd73c,\n0x26d3,0x36f2,0x0691,0x16b0,0x6657,0x7676,0x4615,0x5634,\n0xd94c,0xc96d,0xf90e,0xe92f,0x99c8,0x89e9,0xb98a,0xa9ab,\n0x5844,0x4865,0x7806,0x6827,0x18c0,0x08e1,0x3882,0x28a3,\n0xcb7d,0xdb5c,0xeb3f,0xfb1e,0x8bf9,0x9bd8,0xabbb,0xbb9a,\n0x4a75,0x5a54,0x6a37,0x7a16,0x0af1,0x1ad0,0x2ab3,0x3a92,\n0xfd2e,0xed0f,0xdd6c,0xcd4d,0xbdaa,0xad8b,0x9de8,0x8dc9,\n0x7c26,0x6c07,0x5c64,0x4c45,0x3ca2,0x2c83,0x1ce0,0x0cc1,\n0xef1f,0xff3e,0xcf5d,0xdf7c,0xaf9b,0xbfba,0x8fd9,0x9ff8,\n0x6e17,0x7e36,0x4e55,0x5e74,0x2e93,0x3eb2,0x0ed1,0x1ef0,\n]\n\ndef crc_hqx(s,crc):\n for c in s:\n crc=((crc <<8)&0xff00)^crctab_hqx[((crc >>8)&0xff)^ord(c)]\n \n return crc\n \ndef rlecode_hqx(s):\n ''\n\n\n\n \n if not s:\n return''\n result=[]\n prev=s[0]\n count=1\n \n \n \n \n if s[-1]=='!':\n s=s[1:]+'?'\n else :\n s=s[1:]+'!'\n \n for c in s:\n if c ==prev and count <255:\n count +=1\n else :\n if count ==1:\n if prev !='\\x90':\n result.append(prev)\n else :\n result.extend(['\\x90','\\x00'])\n elif count <4:\n if prev !='\\x90':\n result.extend([prev]*count)\n else :\n result.extend(['\\x90','\\x00']*count)\n else :\n if prev !='\\x90':\n result.extend([prev,'\\x90',chr(count)])\n else :\n result.extend(['\\x90','\\x00','\\x90',chr(count)])\n count=1\n prev=c\n \n return''.join(result)\n \ndef rledecode_hqx(s):\n s=s.split('\\x90')\n result=[s[0]]\n prev=s[0]\n for snippet in s[1:]:\n count=ord(snippet[0])\n if count >0:\n result.append(prev[-1]*(count -1))\n prev=snippet\n else :\n result.append('\\x90')\n prev='\\x90'\n result.append(snippet[1:])\n \n return''.join(result)\n 
\ncrc_32_tab=[\n0x00000000,0x77073096,0xee0e612c,0x990951ba,0x076dc419,\n0x706af48f,0xe963a535,0x9e6495a3,0x0edb8832,0x79dcb8a4,\n0xe0d5e91e,0x97d2d988,0x09b64c2b,0x7eb17cbd,0xe7b82d07,\n0x90bf1d91,0x1db71064,0x6ab020f2,0xf3b97148,0x84be41de,\n0x1adad47d,0x6ddde4eb,0xf4d4b551,0x83d385c7,0x136c9856,\n0x646ba8c0,0xfd62f97a,0x8a65c9ec,0x14015c4f,0x63066cd9,\n0xfa0f3d63,0x8d080df5,0x3b6e20c8,0x4c69105e,0xd56041e4,\n0xa2677172,0x3c03e4d1,0x4b04d447,0xd20d85fd,0xa50ab56b,\n0x35b5a8fa,0x42b2986c,0xdbbbc9d6,0xacbcf940,0x32d86ce3,\n0x45df5c75,0xdcd60dcf,0xabd13d59,0x26d930ac,0x51de003a,\n0xc8d75180,0xbfd06116,0x21b4f4b5,0x56b3c423,0xcfba9599,\n0xb8bda50f,0x2802b89e,0x5f058808,0xc60cd9b2,0xb10be924,\n0x2f6f7c87,0x58684c11,0xc1611dab,0xb6662d3d,0x76dc4190,\n0x01db7106,0x98d220bc,0xefd5102a,0x71b18589,0x06b6b51f,\n0x9fbfe4a5,0xe8b8d433,0x7807c9a2,0x0f00f934,0x9609a88e,\n0xe10e9818,0x7f6a0dbb,0x086d3d2d,0x91646c97,0xe6635c01,\n0x6b6b51f4,0x1c6c6162,0x856530d8,0xf262004e,0x6c0695ed,\n0x1b01a57b,0x8208f4c1,0xf50fc457,0x65b0d9c6,0x12b7e950,\n0x8bbeb8ea,0xfcb9887c,0x62dd1ddf,0x15da2d49,0x8cd37cf3,\n0xfbd44c65,0x4db26158,0x3ab551ce,0xa3bc0074,0xd4bb30e2,\n0x4adfa541,0x3dd895d7,0xa4d1c46d,0xd3d6f4fb,0x4369e96a,\n0x346ed9fc,0xad678846,0xda60b8d0,0x44042d73,0x33031de5,\n0xaa0a4c5f,0xdd0d7cc9,0x5005713c,0x270241aa,0xbe0b1010,\n0xc90c2086,0x5768b525,0x206f85b3,0xb966d409,0xce61e49f,\n0x5edef90e,0x29d9c998,0xb0d09822,0xc7d7a8b4,0x59b33d17,\n0x2eb40d81,0xb7bd5c3b,0xc0ba6cad,0xedb88320,0x9abfb3b6,\n0x03b6e20c,0x74b1d29a,0xead54739,0x9dd277af,0x04db2615,\n0x73dc1683,0xe3630b12,0x94643b84,0x0d6d6a3e,0x7a6a5aa8,\n0xe40ecf0b,0x9309ff9d,0x0a00ae27,0x7d079eb1,0xf00f9344,\n0x8708a3d2,0x1e01f268,0x6906c2fe,0xf762575d,0x806567cb,\n0x196c3671,0x6e6b06e7,0xfed41b76,0x89d32be0,0x10da7a5a,\n0x67dd4acc,0xf9b9df6f,0x8ebeeff9,0x17b7be43,0x60b08ed5,\n0xd6d6a3e8,0xa1d1937e,0x38d8c2c4,0x4fdff252,0xd1bb67f1,\n0xa6bc5767,0x3fb506dd,0x48b2364b,0xd80d2bda,0xaf0a1b4c,\n0x36034af6,0x41047a60,0xdf60efc3,0xa867df55,0x316e8eef,\n0x4669be79,0xcb61b38c,0xbc66831a,0x256fd2a0,0x5268e236,\n0xcc0c7795,0xbb0b4703,0x220216b9,0x5505262f,0xc5ba3bbe,\n0xb2bd0b28,0x2bb45a92,0x5cb36a04,0xc2d7ffa7,0xb5d0cf31,\n0x2cd99e8b,0x5bdeae1d,0x9b64c2b0,0xec63f226,0x756aa39c,\n0x026d930a,0x9c0906a9,0xeb0e363f,0x72076785,0x05005713,\n0x95bf4a82,0xe2b87a14,0x7bb12bae,0x0cb61b38,0x92d28e9b,\n0xe5d5be0d,0x7cdcefb7,0x0bdbdf21,0x86d3d2d4,0xf1d4e242,\n0x68ddb3f8,0x1fda836e,0x81be16cd,0xf6b9265b,0x6fb077e1,\n0x18b74777,0x88085ae6,0xff0f6a70,0x66063bca,0x11010b5c,\n0x8f659eff,0xf862ae69,0x616bffd3,0x166ccf45,0xa00ae278,\n0xd70dd2ee,0x4e048354,0x3903b3c2,0xa7672661,0xd06016f7,\n0x4969474d,0x3e6e77db,0xaed16a4a,0xd9d65adc,0x40df0b66,\n0x37d83bf0,0xa9bcae53,0xdebb9ec5,0x47b2cf7f,0x30b5ffe9,\n0xbdbdf21c,0xcabac28a,0x53b39330,0x24b4a3a6,0xbad03605,\n0xcdd70693,0x54de5729,0x23d967bf,0xb3667a2e,0xc4614ab8,\n0x5d681b02,0x2a6f2b94,0xb40bbe37,0xc30c8ea1,0x5a05df1b,\n0x2d02ef8d\n]\n\ndef crc32(s,crc=0):\n result=0\n crc=~int(crc)&0xffffffff\n \n for c in s:\n crc=crc_32_tab[(crc ^int(ord(c)))&0xff]^(crc >>8)\n \n \n \n result=crc ^0xffffffff\n \n if result >2 **31:\n result=((result+2 **31)%2 **32)-2 **31\n \n return result\n \ndef b2a_hex(s):\n result=[]\n for char in s:\n c=(ord(char)>>4)&0xf\n if c >9:\n c=c+ord('a')-10\n else :\n c=c+ord('0')\n result.append(chr(c))\n c=ord(char)&0xf\n if c >9:\n c=c+ord('a')-10\n else :\n c=c+ord('0')\n result.append(chr(c))\n return''.join(result)\n 
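The crc32 function above is the classic table-driven CRC-32: XOR the next byte into the low bits of the register, use that to index a 256-entry table, and fold the table value into the register shifted right by eight. The table itself comes from the reflected polynomial 0xEDB88320, so it can be regenerated and checked against zlib (a sketch, not the bundled code):

import zlib

def make_table():
    table = []
    for n in range(256):
        c = n
        for _ in range(8):
            c = (c >> 1) ^ 0xEDB88320 if c & 1 else c >> 1
        table.append(c)
    return table

TABLE = make_table()                      # TABLE[1] == 0x77073096, as above

def crc32(data, crc=0):
    crc ^= 0xFFFFFFFF                     # pre-invert
    for b in data:
        crc = TABLE[(crc ^ b) & 0xFF] ^ (crc >> 8)
    return crc ^ 0xFFFFFFFF               # post-invert

data = b"123456789"
assert crc32(data) == zlib.crc32(data) == 0xCBF43926   # standard check value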
\nhexlify=b2a_hex\n\ntable_hex=[\n-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,\n-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,\n-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,\n0,1,2,3,4,5,6,7,8,9,-1,-1,-1,-1,-1,-1,\n-1,10,11,12,13,14,15,-1,-1,-1,-1,-1,-1,-1,-1,-1,\n-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,\n-1,10,11,12,13,14,15,-1,-1,-1,-1,-1,-1,-1,-1,-1,\n-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1\n]\n\n\ndef a2b_hex(t):\n result=[]\n \n def pairs_gen(s):\n while s:\n try :\n yield table_hex[ord(s[0])],table_hex[ord(s[1])]\n except IndexError:\n if len(s):\n raise TypeError('Odd-length string')\n return\n s=s[2:]\n \n for a,b in pairs_gen(t):\n if a <0 or b <0:\n raise TypeError('Non-hexadecimal digit found')\n result.append(chr((a <<4)+b))\n return bytes(''.join(result),__BRYTHON__.charset)\n \n \nunhexlify=a2b_hex\n"], "_weakref": [".py", "class ProxyType:\n\n def __init__(self,obj):\n self.obj=obj\n \nCallableProxyType=ProxyType\nProxyTypes=[ProxyType,CallableProxyType]\n\nclass ReferenceType:\n\n def __init__(self,obj,callback):\n self.obj=obj\n self.callback=callback\n \nclass ref:\n\n def __init__(self,obj,callback=None ):\n self.obj=ReferenceType(obj,callback)\n self.callback=callback\n \n def __call__(self):\n return self.obj.obj\n \ndef getweakrefcount(obj):\n return 1\n \ndef getweakrefs(obj):\n return obj\n \n \ndef proxy(obj,callback=None ):\n return ProxyType(obj)\n \n"], "asyncio.log": [".py", "''\n\nimport logging\n\n\n\nlogger=logging.getLogger(__package__)\n"], "re": [".py", "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\nr\"\"\"Support for regular expressions (RE).\n\nThis module provides regular expression matching operations similar to\nthose found in Perl. It supports both 8-bit and Unicode strings; both\nthe pattern and the strings being processed can contain null bytes and\ncharacters outside the US ASCII range.\n\nRegular expressions can contain both special and ordinary characters.\nMost ordinary characters, like \"A\", \"a\", or \"0\", are the simplest\nregular expressions; they simply match themselves. You can\nconcatenate ordinary characters, so last matches the string 'last'.\n\nThe special characters are:\n \".\" Matches any character except a newline.\n \"^\" Matches the start of the string.\n \"$\" Matches the end of the string or just before the newline at\n the end of the string.\n \"*\" Matches 0 or more (greedy) repetitions of the preceding RE.\n Greedy means that it will match as many repetitions as possible.\n \"+\" Matches 1 or more (greedy) repetitions of the preceding RE.\n \"?\" Matches 0 or 1 (greedy) of the preceding RE.\n *?,+?,?? Non-greedy versions of the previous three special characters.\n {m,n} Matches from m to n repetitions of the preceding RE.\n {m,n}? Non-greedy version of the above.\n \"\\\\\" Either escapes special characters or signals a special sequence.\n [] Indicates a set of characters.\n A \"^\" as the first character indicates a complementing set.\n \"|\" A|B, creates an RE that will match either A or B.\n (...) Matches the RE inside the parentheses.\n The contents can be retrieved or matched later in the string.\n (?aiLmsux) Set the A, I, L, M, S, U, or X flag for the RE (see below).\n (?:...) Non-grouping version of regular parentheses.\n (?P...) The substring matched by the group is accessible by name.\n (?P=name) Matches the text matched earlier by the group named name.\n (?#...) A comment; ignored.\n (?=...) Matches if ... matches next, but doesn't consume the string.\n (?!...) Matches if ... 
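The _weakref module above is only a shim: its ref object stores the target directly, so calling the reference always returns it. CPython's real weakref lets the referent go away; the contrast (CPython semantics shown, not the shim's):

import weakref

class Thing:
    pass

t = Thing()
r = weakref.ref(t)
print(r() is t)    # True while the object is alive
del t
print(r())         # None in CPython once the referent is collected;
                   # the shim above would keep returning the object.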
doesn't match next.\n (?<=...) Matches if preceded by ... (must be fixed length).\n (?=_MAXCACHE:\n _cache.clear()\n \n \n _cache[\"%s:%s:%s\"%(type(pattern),pattern,flags)]=p\n return p\n \ndef _compile_repl(repl,pattern):\n\n try :\n \n \n return _cache_repl[\"%s:%s\"%(repl,pattern)]\n except KeyError:\n pass\n p=sre_parse.parse_template(repl,pattern)\n if len(_cache_repl)>=_MAXCACHE:\n _cache_repl.clear()\n _cache_repl[\"%s:%s\"%(repl,pattern)]=p\n \n \n return p\n \ndef _expand(pattern,match,template):\n\n template=sre_parse.parse_template(template,pattern)\n return sre_parse.expand_template(template,match)\n \ndef _subx(pattern,template):\n\n template=_compile_repl(template,pattern)\n if not template[0]and len(template[1])==1:\n \n return template[1][0]\n def filter(match,template=template):\n return sre_parse.expand_template(template,match)\n return filter\n \n \n \nimport copyreg\n\ndef _pickle(p):\n return _compile,(p.pattern,p.flags)\n \ncopyreg.pickle(_pattern_type,_pickle,_compile)\n\n\n\n\nclass Scanner:\n def __init__(self,lexicon,flags=0):\n from sre_constants import BRANCH,SUBPATTERN\n self.lexicon=lexicon\n \n p=[]\n s=sre_parse.Pattern()\n s.flags=flags\n for phrase,action in lexicon:\n p.append(sre_parse.SubPattern(s,[\n (SUBPATTERN,(len(p)+1,sre_parse.parse(phrase,flags))),\n ]))\n s.groups=len(p)+1\n p=sre_parse.SubPattern(s,[(BRANCH,(None ,p))])\n self.scanner=sre_compile.compile(p)\n def scan(self,string):\n result=[]\n append=result.append\n match=self.scanner.scanner(string).match\n i=0\n while 1:\n m=match()\n if not m:\n break\n j=m.end()\n if i ==j:\n break\n action=self.lexicon[m.lastindex -1][1]\n if callable(action):\n self.match=m\n action=action(self,m.group())\n if action is not None :\n append(action)\n i=j\n return result,string[i:]\n"], "crypto_js.rollups.sha3": [".js", "/*\nCryptoJS v3.1.2\ncode.google.com/p/crypto-js\n(c) 2009-2013 by Jeff Mott. 
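The re docstring above lists (?P<name>...) named groups and (?P=name) backreferences, and _compile() keeps compiled patterns in a bounded module-level cache keyed on (type, pattern, flags) so repeated calls skip recompilation. A short illustration of both points:

import re

# Named group plus backreference, per the syntax summary above.
pat = re.compile(r"(?P<word>\w+)\s+(?P=word)")
print(pat.search("the the quick fox").group("word"))                 # 'the'

# Module-level functions go through the cache, reusing the compiled object.
print(re.search(r"(?P<word>\w+)\s+(?P=word)", "ha ha!").group(0))    # 'ha ha'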
All rights reserved.\ncode.google.com/p/crypto-js/wiki/License\n*/\nvar CryptoJS=CryptoJS||function(v,p){var d={},u=d.lib={},r=function(){},f=u.Base={extend:function(a){r.prototype=this;var b=new r;a&&b.mixIn(a);b.hasOwnProperty(\"init\")||(b.init=function(){b.$super.init.apply(this,arguments)});b.init.prototype=b;b.$super=this;return b},create:function(){var a=this.extend();a.init.apply(a,arguments);return a},init:function(){},mixIn:function(a){for(var b in a)a.hasOwnProperty(b)&&(this[b]=a[b]);a.hasOwnProperty(\"toString\")&&(this.toString=a.toString)},clone:function(){return this.init.prototype.extend(this)}},\ns=u.WordArray=f.extend({init:function(a,b){a=this.words=a||[];this.sigBytes=b!=p?b:4*a.length},toString:function(a){return(a||y).stringify(this)},concat:function(a){var b=this.words,c=a.words,j=this.sigBytes;a=a.sigBytes;this.clamp();if(j%4)for(var n=0;n>>2]|=(c[n>>>2]>>>24-8*(n%4)&255)<<24-8*((j+n)%4);else if(65535>>2]=c[n>>>2];else b.push.apply(b,c);this.sigBytes+=a;return this},clamp:function(){var a=this.words,b=this.sigBytes;a[b>>>2]&=4294967295<<\n32-8*(b%4);a.length=v.ceil(b/4)},clone:function(){var a=f.clone.call(this);a.words=this.words.slice(0);return a},random:function(a){for(var b=[],c=0;c>>2]>>>24-8*(j%4)&255;c.push((n>>>4).toString(16));c.push((n&15).toString(16))}return c.join(\"\")},parse:function(a){for(var b=a.length,c=[],j=0;j>>3]|=parseInt(a.substr(j,\n2),16)<<24-4*(j%8);return new s.init(c,b/2)}},e=x.Latin1={stringify:function(a){var b=a.words;a=a.sigBytes;for(var c=[],j=0;j>>2]>>>24-8*(j%4)&255));return c.join(\"\")},parse:function(a){for(var b=a.length,c=[],j=0;j>>2]|=(a.charCodeAt(j)&255)<<24-8*(j%4);return new s.init(c,b)}},q=x.Utf8={stringify:function(a){try{return decodeURIComponent(escape(e.stringify(a)))}catch(b){throw Error(\"Malformed UTF-8 data\");}},parse:function(a){return e.parse(unescape(encodeURIComponent(a)))}},\nt=u.BufferedBlockAlgorithm=f.extend({reset:function(){this._data=new s.init;this._nDataBytes=0},_append:function(a){\"string\"==typeof a&&(a=q.parse(a));this._data.concat(a);this._nDataBytes+=a.sigBytes},_process:function(a){var b=this._data,c=b.words,j=b.sigBytes,n=this.blockSize,e=j/(4*n),e=a?v.ceil(e):v.max((e|0)-this._minBufferSize,0);a=e*n;j=v.min(4*a,j);if(a){for(var f=0;ft;t++){s[e+5*q]=(t+1)*(t+2)/2%64;var w=(2*e+3*q)%5,e=q%5,q=w}for(e=0;5>e;e++)for(q=0;5>q;q++)x[e+5*q]=q+5*((2*e+3*q)%5);e=1;for(q=0;24>q;q++){for(var a=w=t=0;7>a;a++){if(e&1){var b=(1<b?w^=1<e;e++)c[e]=f.create();d=d.SHA3=r.extend({cfg:r.cfg.extend({outputLength:512}),_doReset:function(){for(var a=this._state=\n[],b=0;25>b;b++)a[b]=new f.init;this.blockSize=(1600-2*this.cfg.outputLength)/32},_doProcessBlock:function(a,b){for(var e=this._state,f=this.blockSize/2,h=0;h>>24)&16711935|(l<<24|l>>>8)&4278255360,m=(m<<8|m>>>24)&16711935|(m<<24|m>>>8)&4278255360,g=e[h];g.high^=m;g.low^=l}for(f=0;24>f;f++){for(h=0;5>h;h++){for(var d=l=0,k=0;5>k;k++)g=e[h+5*k],l^=g.high,d^=g.low;g=c[h];g.high=l;g.low=d}for(h=0;5>h;h++){g=c[(h+4)%5];l=c[(h+1)%5];m=l.high;k=l.low;l=g.high^\n(m<<1|k>>>31);d=g.low^(k<<1|m>>>31);for(k=0;5>k;k++)g=e[h+5*k],g.high^=l,g.low^=d}for(m=1;25>m;m++)g=e[m],h=g.high,g=g.low,k=s[m],32>k?(l=h<>>32-k,d=g<>>32-k):(l=g<>>64-k,d=h<>>64-k),g=c[x[m]],g.high=l,g.low=d;g=c[0];h=e[0];g.high=h.high;g.low=h.low;for(h=0;5>h;h++)for(k=0;5>k;k++)m=h+5*k,g=e[m],l=c[m],m=c[(h+1)%5+5*k],d=c[(h+2)%5+5*k],g.high=l.high^~m.high&d.high,g.low=l.low^~m.low&d.low;g=e[0];h=y[f];g.high^=h.high;g.low^=h.low}},_doFinalize:function(){var 
a=this._data,\nb=a.words,c=8*a.sigBytes,e=32*this.blockSize;b[c>>>5]|=1<<24-c%32;b[(v.ceil((c+1)/e)*e>>>5)-1]|=128;a.sigBytes=4*b.length;this._process();for(var a=this._state,b=this.cfg.outputLength/8,c=b/8,e=[],h=0;h>>24)&16711935|(f<<24|f>>>8)&4278255360,d=(d<<8|d>>>24)&16711935|(d<<24|d>>>8)&4278255360;e.push(d);e.push(f)}return new u.init(e,b)},clone:function(){for(var a=r.clone.call(this),b=a._state=this._state.slice(0),c=0;25>c;c++)b[c]=b[c].clone();return a}});\np.SHA3=r._createHelper(d);p.HmacSHA3=r._createHmacHelper(d)})(Math);\n"], "_threading_local": [".py", "''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\nfrom weakref import ref\nfrom contextlib import contextmanager\n\n__all__=[\"local\"]\n\n\n\n\n\n\n\n\n\n\n\nclass _localimpl:\n ''\n __slots__='key','dicts','localargs','locallock','__weakref__'\n \n def __init__(self):\n \n \n \n self.key='_threading_local._localimpl.'+str(id(self))\n \n self.dicts={}\n \n def get_dict(self):\n ''\n \n thread=current_thread()\n return self.dicts[id(thread)][1]\n \n def create_dict(self):\n ''\n localdict={}\n key=self.key\n thread=current_thread()\n idt=id(thread)\n def local_deleted(_,key=key):\n \n thread=wrthread()\n if thread is not None :\n del thread.__dict__[key]\n def thread_deleted(_,idt=idt):\n \n \n \n \n local=wrlocal()\n if local is not None :\n dct=local.dicts.pop(idt)\n wrlocal=ref(self,local_deleted)\n wrthread=ref(thread,thread_deleted)\n thread.__dict__[key]=wrlocal\n self.dicts[idt]=wrthread,localdict\n return localdict\n \n \n@contextmanager\ndef _patch(self):\n impl=object.__getattribute__(self,'_local__impl')\n try :\n dct=impl.get_dict()\n except KeyError:\n dct=impl.create_dict()\n args,kw=impl.localargs\n self.__init__(*args,**kw)\n with impl.locallock:\n object.__setattr__(self,'__dict__',dct)\n yield\n \n \nclass local:\n __slots__='_local__impl','__dict__'\n \n def __new__(cls,*args,**kw):\n if (args or kw)and (cls.__init__ is object.__init__):\n raise TypeError(\"Initialization arguments are not supported\")\n self=object.__new__(cls)\n impl=_localimpl()\n impl.localargs=(args,kw)\n impl.locallock=RLock()\n object.__setattr__(self,'_local__impl',impl)\n \n \n \n impl.create_dict()\n return self\n \n def __getattribute__(self,name):\n with _patch(self):\n return object.__getattribute__(self,name)\n \n def __setattr__(self,name,value):\n if name =='__dict__':\n raise AttributeError(\n \"%r object attribute '__dict__' is read-only\"\n %self.__class__.__name__)\n with _patch(self):\n return object.__setattr__(self,name,value)\n \n def __delattr__(self,name):\n if name =='__dict__':\n raise AttributeError(\n \"%r object attribute '__dict__' is read-only\"\n %self.__class__.__name__)\n with _patch(self):\n return object.__delattr__(self,name)\n \n \nfrom threading import current_thread,RLock\n"], "urllib": [".py", "", 1], "_posixsubprocess": [".js", "var $module=(function($B){\n\n return {\n cloexec_pipe: function() {} // fixme\n }\n})(__BRYTHON__)\n"], "concurrent.futures.webworker": [".py", "\n\n\n\"\"\"Implements WebWorkerExecutor.\n\nThe follow diagram and text describe the data-flow through the system:\n\n|======================= In-process =====================|== Out-of-process ==|\n\n+----------+ +----------+ +--------+ +-----------+ +---------+\n| | => | Work Ids | => | | => | Call Q | => | 
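The _threading_local module above gives each thread its own attribute dictionary for a local instance, keyed by thread id and cleaned up through weak references when either the thread or the local object dies. Its observable behaviour matches threading.local, which is enough for a quick demonstration:

import threading

tls = threading.local()      # each thread gets an independent attribute namespace
tls.value = "main"

def worker():
    tls.value = "worker"     # set in this thread only
    print("worker sees:", tls.value)

t = threading.Thread(target=worker)
t.start()
t.join()
print("main still sees:", tls.value)     # 'main' -- untouched by the worker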
|\n| | +----------+ | | +-----------+ | |\n| | | ... | | | | ... | | |\n| | | 6 | | | | 5, call() | | |\n| | | 7 | | | | ... | | |\n| Process | | ... | | Local | +-----------+ | Process |\n| Pool | +----------+ | Worker | | #1..n |\n| Executor | | Thread | | |\n| | +----------- + | | +-----------+ | |\n| | <=> | Work Items | <=> | | <= | Result Q | <= | |\n| | +------------+ | | +-----------+ | |\n| | | 6: call() | | | | ... | | |\n| | | future | | | | 4, result | | |\n| | | ... | | | | 3, except | | |\n+----------+ +------------+ +--------+ +-----------+ +---------+\n\nExecutor.submit() called:\n- creates a uniquely numbered _WorkItem and adds it to the \"Work Items\" dict\n- adds the id of the _WorkItem to the \"Work Ids\" queue\n\nLocal worker thread:\n- reads work ids from the \"Work Ids\" queue and looks up the corresponding\n WorkItem from the \"Work Items\" dict: if the work item has been cancelled then\n it is simply removed from the dict, otherwise it is repackaged as a\n _CallItem and put in the \"Call Q\". New _CallItems are put in the \"Call Q\"\n until \"Call Q\" is full. NOTE: the size of the \"Call Q\" is kept small because\n calls placed in the \"Call Q\" can no longer be cancelled with Future.cancel().\n- reads _ResultItems from \"Result Q\", updates the future stored in the\n \"Work Items\" dict and deletes the dict entry\n\nProcess #1..n:\n- reads _CallItems from \"Call Q\", executes the calls, and puts the resulting\n _ResultItems in \"Result Q\"\n\"\"\"\n\n__author__='Brian Quinlan (brian@sweetapp.com)'\n\nimport atexit\nimport os\nfrom concurrent.futures import _base\nimport queue\nfrom queue import Full\nimport multiprocessing\nfrom multiprocessing import SimpleQueue\nimport threading\nimport weakref\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n_threads_queues=weakref.WeakKeyDictionary()\n_shutdown=False\n\ndef _python_exit():\n global _shutdown\n _shutdown=True\n items=list(_threads_queues.items())\n for t,q in items:\n q.put(None )\n for t,q in items:\n t.join()\n \n \n \n \n \nEXTRA_QUEUED_CALLS=1\n\nclass _WorkItem(object):\n def __init__(self,future,fn,args,kwargs):\n self.future=future\n self.fn=fn\n self.args=args\n self.kwargs=kwargs\n \nclass _ResultItem(object):\n def __init__(self,work_id,exception=None ,result=None ):\n self.work_id=work_id\n self.exception=exception\n self.result=result\n \nclass _CallItem(object):\n def __init__(self,work_id,fn,args,kwargs):\n self.work_id=work_id\n self.fn=fn\n self.args=args\n self.kwargs=kwargs\n \ndef _web_worker(call_queue,result_queue):\n ''\n\n\n\n\n\n\n\n\n\n\n \n while True :\n call_item=call_queue.get(block=True )\n if call_item is None :\n \n result_queue.put(os.getpid())\n return\n try :\n r=call_item.fn(*call_item.args,**call_item.kwargs)\n except BaseException as e:\n result_queue.put(_ResultItem(call_item.work_id,\n exception=e))\n else :\n result_queue.put(_ResultItem(call_item.work_id,\n result=r))\n \ndef _add_call_item_to_queue(pending_work_items,\nwork_ids,\ncall_queue):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n \n while True :\n if call_queue.full():\n return\n try :\n work_id=work_ids.get(block=False )\n except queue.Empty:\n return\n else :\n work_item=pending_work_items[work_id]\n \n if work_item.future.set_running_or_notify_cancel():\n call_queue.put(_CallItem(work_id,\n work_item.fn,\n work_item.args,\n work_item.kwargs),\n block=True )\n else :\n del pending_work_items[work_id]\n continue\n \ndef 
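The docstring above lays out the usual concurrent.futures pipeline: submit() wraps the call in a _WorkItem holding a Future, a local management thread moves work ids into the call queue, and workers post _ResultItems back that resolve the Futures. The public surface is the ordinary Executor API; a sketch of that submit/Future flow using ThreadPoolExecutor (chosen because it runs anywhere, whereas the class above targets web workers):

from concurrent.futures import ThreadPoolExecutor

def square(x):
    return x * x

with ThreadPoolExecutor(max_workers=2) as pool:
    futures = [pool.submit(square, n) for n in range(5)]   # one Future per submitted call
    print([f.result() for f in futures])                   # [0, 1, 4, 9, 16]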
_queue_management_worker(executor_reference,\nprocesses,\npending_work_items,\nwork_ids_queue,\ncall_queue,\nresult_queue):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n executor=None\n \n def shutting_down():\n return _shutdown or executor is None or executor._shutdown_thread\n \n def shutdown_worker():\n \n nb_children_alive=sum(p.is_alive()for p in processes.values())\n for i in range(0,nb_children_alive):\n call_queue.put_nowait(None )\n \n call_queue.close()\n \n \n for p in processes.values():\n p.join()\n \n reader=result_queue._reader\n \n while True :\n _add_call_item_to_queue(pending_work_items,\n work_ids_queue,\n call_queue)\n \n sentinels=[p.sentinel for p in processes.values()]\n assert sentinels\n \n ready=[reader]+sentinels\n if reader in ready:\n result_item=reader.recv()\n else :\n \n executor=executor_reference()\n if executor is not None :\n executor._broken=True\n executor._shutdown_thread=True\n executor=None\n \n for work_id,work_item in pending_work_items.items():\n work_item.future.set_exception(\n BrokenProcessPool(\n \"A process in the process pool was \"\n \"terminated abruptly while the future was \"\n \"running or pending.\"\n ))\n \n del work_item\n pending_work_items.clear()\n \n \n for p in processes.values():\n p.terminate()\n shutdown_worker()\n return\n if isinstance(result_item,int):\n \n \n assert shutting_down()\n p=processes.pop(result_item)\n p.join()\n if not processes:\n shutdown_worker()\n return\n elif result_item is not None :\n work_item=pending_work_items.pop(result_item.work_id,None )\n \n if work_item is not None :\n if result_item.exception:\n work_item.future.set_exception(result_item.exception)\n else :\n work_item.future.set_result(result_item.result)\n \n del work_item\n \n executor=executor_reference()\n \n \n \n \n if shutting_down():\n try :\n \n \n if not pending_work_items:\n shutdown_worker()\n return\n except Full:\n \n \n pass\n executor=None\n \n_system_limits_checked=False\n_system_limited=None\ndef _check_system_limits():\n global _system_limits_checked,_system_limited\n if _system_limits_checked:\n if _system_limited:\n raise NotImplementedError(_system_limited)\n _system_limits_checked=True\n try :\n nsems_max=os.sysconf(\"SC_SEM_NSEMS_MAX\")\n except (AttributeError,ValueError):\n \n return\n if nsems_max ==-1:\n \n \n return\n if nsems_max >=256:\n \n \n return\n _system_limited=\"system provides too few semaphores (%d available, 256 necessary)\"%nsems_max\n raise NotImplementedError(_system_limited)\n \n \nclass BrokenProcessPool(RuntimeError):\n ''\n\n\n \n \n \nclass WebWorkerExecutor(_base.Executor):\n def __init__(self,max_workers=None ):\n ''\n\n\n\n\n\n \n _check_system_limits()\n \n if max_workers is None :\n self._max_workers=os.cpu_count()or 1\n else :\n self._max_workers=max_workers\n \n \n \n \n self._call_queue=multiprocessing.Queue(self._max_workers+\n EXTRA_QUEUED_CALLS)\n \n \n \n self._call_queue._ignore_epipe=True\n self._result_queue=SimpleQueue()\n self._work_ids=queue.Queue()\n self._queue_management_thread=None\n \n self._webworkers={}\n \n \n self._shutdown_thread=False\n self._shutdown_lock=threading.Lock()\n self._broken=False\n self._queue_count=0\n self._pending_work_items={}\n \n def _start_queue_management_thread(self):\n \n \n def weakref_cb(_,q=self._result_queue):\n q.put(None )\n if self._queue_management_thread is None :\n \n self._adjust_process_count()\n self._queue_management_thread=threading.Thread(\n target=_queue_management_worker,\n args=(weakref.ref(self,weakref_cb),\n 
self._webworkers,\n self._pending_work_items,\n self._work_ids,\n self._call_queue,\n self._result_queue))\n self._queue_management_thread.daemon=True\n self._queue_management_thread.start()\n _threads_queues[self._queue_management_thread]=self._result_queue\n \n def _adjust_process_count(self):\n for _ in range(len(self._webworkers),self._max_workers):\n p=multiprocessing.Process(\n target=_web_worker,\n args=(self._call_queue,\n self._result_queue))\n p.start()\n self._webworkers[p.pid]=p\n \n def submit(self,fn,*args,**kwargs):\n with self._shutdown_lock:\n if self._broken:\n raise BrokenProcessPool('A child process terminated '\n 'abruptly, the process pool is not usable anymore')\n if self._shutdown_thread:\n raise RuntimeError('cannot schedule new futures after shutdown')\n \n f=_base.Future()\n w=_WorkItem(f,fn,args,kwargs)\n \n self._pending_work_items[self._queue_count]=w\n self._work_ids.put(self._queue_count)\n self._queue_count +=1\n \n self._result_queue.put(None )\n \n self._start_queue_management_thread()\n return f\n submit.__doc__=_base.Executor.submit.__doc__\n \n def shutdown(self,wait=True ):\n with self._shutdown_lock:\n self._shutdown_thread=True\n if self._queue_management_thread:\n \n self._result_queue.put(None )\n if wait:\n self._queue_management_thread.join()\n \n \n self._queue_management_thread=None\n self._call_queue=None\n self._result_queue=None\n self._webworkers=None\n shutdown.__doc__=_base.Executor.shutdown.__doc__\n \natexit.register(_python_exit)\n"], "encodings.cp857": [".py", "''\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_map)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False ):\n return codecs.charmap_encode(input,self.errors,encoding_map)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False ):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='cp857',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n 
\ndecoding_map=codecs.make_identity_dict(range(256))\ndecoding_map.update({\n0x0080:0x00c7,\n0x0081:0x00fc,\n0x0082:0x00e9,\n0x0083:0x00e2,\n0x0084:0x00e4,\n0x0085:0x00e0,\n0x0086:0x00e5,\n0x0087:0x00e7,\n0x0088:0x00ea,\n0x0089:0x00eb,\n0x008a:0x00e8,\n0x008b:0x00ef,\n0x008c:0x00ee,\n0x008d:0x0131,\n0x008e:0x00c4,\n0x008f:0x00c5,\n0x0090:0x00c9,\n0x0091:0x00e6,\n0x0092:0x00c6,\n0x0093:0x00f4,\n0x0094:0x00f6,\n0x0095:0x00f2,\n0x0096:0x00fb,\n0x0097:0x00f9,\n0x0098:0x0130,\n0x0099:0x00d6,\n0x009a:0x00dc,\n0x009b:0x00f8,\n0x009c:0x00a3,\n0x009d:0x00d8,\n0x009e:0x015e,\n0x009f:0x015f,\n0x00a0:0x00e1,\n0x00a1:0x00ed,\n0x00a2:0x00f3,\n0x00a3:0x00fa,\n0x00a4:0x00f1,\n0x00a5:0x00d1,\n0x00a6:0x011e,\n0x00a7:0x011f,\n0x00a8:0x00bf,\n0x00a9:0x00ae,\n0x00aa:0x00ac,\n0x00ab:0x00bd,\n0x00ac:0x00bc,\n0x00ad:0x00a1,\n0x00ae:0x00ab,\n0x00af:0x00bb,\n0x00b0:0x2591,\n0x00b1:0x2592,\n0x00b2:0x2593,\n0x00b3:0x2502,\n0x00b4:0x2524,\n0x00b5:0x00c1,\n0x00b6:0x00c2,\n0x00b7:0x00c0,\n0x00b8:0x00a9,\n0x00b9:0x2563,\n0x00ba:0x2551,\n0x00bb:0x2557,\n0x00bc:0x255d,\n0x00bd:0x00a2,\n0x00be:0x00a5,\n0x00bf:0x2510,\n0x00c0:0x2514,\n0x00c1:0x2534,\n0x00c2:0x252c,\n0x00c3:0x251c,\n0x00c4:0x2500,\n0x00c5:0x253c,\n0x00c6:0x00e3,\n0x00c7:0x00c3,\n0x00c8:0x255a,\n0x00c9:0x2554,\n0x00ca:0x2569,\n0x00cb:0x2566,\n0x00cc:0x2560,\n0x00cd:0x2550,\n0x00ce:0x256c,\n0x00cf:0x00a4,\n0x00d0:0x00ba,\n0x00d1:0x00aa,\n0x00d2:0x00ca,\n0x00d3:0x00cb,\n0x00d4:0x00c8,\n0x00d5:None ,\n0x00d6:0x00cd,\n0x00d7:0x00ce,\n0x00d8:0x00cf,\n0x00d9:0x2518,\n0x00da:0x250c,\n0x00db:0x2588,\n0x00dc:0x2584,\n0x00dd:0x00a6,\n0x00de:0x00cc,\n0x00df:0x2580,\n0x00e0:0x00d3,\n0x00e1:0x00df,\n0x00e2:0x00d4,\n0x00e3:0x00d2,\n0x00e4:0x00f5,\n0x00e5:0x00d5,\n0x00e6:0x00b5,\n0x00e7:None ,\n0x00e8:0x00d7,\n0x00e9:0x00da,\n0x00ea:0x00db,\n0x00eb:0x00d9,\n0x00ed:0x00ff,\n0x00ee:0x00af,\n0x00ef:0x00b4,\n0x00f0:0x00ad,\n0x00f1:0x00b1,\n0x00f2:None ,\n0x00f3:0x00be,\n0x00f4:0x00b6,\n0x00f5:0x00a7,\n0x00f6:0x00f7,\n0x00f7:0x00b8,\n0x00f8:0x00b0,\n0x00f9:0x00a8,\n0x00fa:0x00b7,\n0x00fb:0x00b9,\n0x00fc:0x00b3,\n0x00fd:0x00b2,\n0x00fe:0x25a0,\n0x00ff:0x00a0,\n})\n\n\n\ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' 
'\n'!'\n'\"'\n'#'\n'$'\n'%'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\xc7'\n'\\xfc'\n'\\xe9'\n'\\xe2'\n'\\xe4'\n'\\xe0'\n'\\xe5'\n'\\xe7'\n'\\xea'\n'\\xeb'\n'\\xe8'\n'\\xef'\n'\\xee'\n'\\u0131'\n'\\xc4'\n'\\xc5'\n'\\xc9'\n'\\xe6'\n'\\xc6'\n'\\xf4'\n'\\xf6'\n'\\xf2'\n'\\xfb'\n'\\xf9'\n'\\u0130'\n'\\xd6'\n'\\xdc'\n'\\xf8'\n'\\xa3'\n'\\xd8'\n'\\u015e'\n'\\u015f'\n'\\xe1'\n'\\xed'\n'\\xf3'\n'\\xfa'\n'\\xf1'\n'\\xd1'\n'\\u011e'\n'\\u011f'\n'\\xbf'\n'\\xae'\n'\\xac'\n'\\xbd'\n'\\xbc'\n'\\xa1'\n'\\xab'\n'\\xbb'\n'\\u2591'\n'\\u2592'\n'\\u2593'\n'\\u2502'\n'\\u2524'\n'\\xc1'\n'\\xc2'\n'\\xc0'\n'\\xa9'\n'\\u2563'\n'\\u2551'\n'\\u2557'\n'\\u255d'\n'\\xa2'\n'\\xa5'\n'\\u2510'\n'\\u2514'\n'\\u2534'\n'\\u252c'\n'\\u251c'\n'\\u2500'\n'\\u253c'\n'\\xe3'\n'\\xc3'\n'\\u255a'\n'\\u2554'\n'\\u2569'\n'\\u2566'\n'\\u2560'\n'\\u2550'\n'\\u256c'\n'\\xa4'\n'\\xba'\n'\\xaa'\n'\\xca'\n'\\xcb'\n'\\xc8'\n'\\ufffe'\n'\\xcd'\n'\\xce'\n'\\xcf'\n'\\u2518'\n'\\u250c'\n'\\u2588'\n'\\u2584'\n'\\xa6'\n'\\xcc'\n'\\u2580'\n'\\xd3'\n'\\xdf'\n'\\xd4'\n'\\xd2'\n'\\xf5'\n'\\xd5'\n'\\xb5'\n'\\ufffe'\n'\\xd7'\n'\\xda'\n'\\xdb'\n'\\xd9'\n'\\xec'\n'\\xff'\n'\\xaf'\n'\\xb4'\n'\\xad'\n'\\xb1'\n'\\ufffe'\n'\\xbe'\n'\\xb6'\n'\\xa7'\n'\\xf7'\n'\\xb8'\n'\\xb0'\n'\\xa8'\n'\\xb7'\n'\\xb9'\n'\\xb3'\n'\\xb2'\n'\\u25a0'\n'\\xa0'\n)\n\n\n\nencoding_map={\n0x0000:0x0000,\n0x0001:0x0001,\n0x0002:0x0002,\n0x0003:0x0003,\n0x0004:0x0004,\n0x0005:0x0005,\n0x0006:0x0006,\n0x0007:0x0007,\n0x0008:0x0008,\n0x0009:0x0009,\n0x000a:0x000a,\n0x000b:0x000b,\n0x000c:0x000c,\n0x000d:0x000d,\n0x000e:0x000e,\n0x000f:0x000f,\n0x0010:0x0010,\n0x0011:0x0011,\n0x0012:0x0012,\n0x0013:0x0013,\n0x0014:0x0014,\n0x0015:0x0015,\n0x0016:0x0016,\n0x0017:0x0017,\n0x0018:0x0018,\n0x0019:0x0019,\n0x001a:0x001a,\n0x001b:0x001b,\n0x001c:0x001c,\n0x001d:0x001d,\n0x001e:0x001e,\n0x001f:0x001f,\n0x0020:0x0020,\n0x0021:0x0021,\n0x0022:0x0022,\n0x0023:0x0023,\n0x0024:0x0024,\n0x0025:0x0025,\n0x0026:0x0026,\n0x0027:0x0027,\n0x0028:0x0028,\n0x0029:0x0029,\n0x002a:0x002a,\n0x002b:0x002b,\n0x002c:0x002c,\n0x002d:0x002d,\n0x002e:0x002e,\n0x002f:0x002f,\n0x0030:0x0030,\n0x0031:0x0031,\n0x0032:0x0032,\n0x0033:0x0033,\n0x0034:0x0034,\n0x0035:0x0035,\n0x0036:0x0036,\n0x0037:0x0037,\n0x0038:0x0038,\n0x0039:0x0039,\n0x003a:0x003a,\n0x003b:0x003b,\n0x003c:0x003c,\n0x003d:0x003d,\n0x003e:0x003e,\n0x003f:0x003f,\n0x0040:0x0040,\n0x0041:0x0041,\n0x0042:0x0042,\n0x0043:0x0043,\n0x0044:0x0044,\n0x0045:0x0045,\n0x0046:0x0046,\n0x0047:0x0047,\n0x0048:0x0048,\n0x0049:0x0049,\n0x004a:0x004a,\n0x004b:0x004b,\n0x004c:0x004c,\n0x004d:0x004d,\n0x004e:0x004e,\n0x004f:0x004f,\n0x0050:0x0050,\n0x0051:0x0051,\n0x0052:0x0052,\n0x0053:0x0053,\n0x0054:0x0054,\n0x0055:0x0055,\n0x0056:0x0056,\n0x0057:0x0057,\n0x0058:0x0058,\n0x0059:0x0059,\n0x005a:0x005a,\n0x005b:0x005b,\n0x005c:0x005c,\n0x005d:0x005d,\n0x005e:0x005e,\n0x005f:0x005f,\n0x0060:0x0060,\n0x0061:0x0061,\n0x0062:0x0062,\n0x0063:0x0063,\n0x0064:0x0064,\n0x0065:0x0065,\n0x0066:0x0066,\n0x0067:0x0067,\n0x0068:0x0068,\n0x0069:0x0069,\n0x006a:0x006a,\n0x006b:0x006b,\n0x006c:0x006c,\n0x006d:0x006d,\n0x006e:0x006e,\n0x006f:0x006f,\n0x0070:0x0070,\n0x0071
:0x0071,\n0x0072:0x0072,\n0x0073:0x0073,\n0x0074:0x0074,\n0x0075:0x0075,\n0x0076:0x0076,\n0x0077:0x0077,\n0x0078:0x0078,\n0x0079:0x0079,\n0x007a:0x007a,\n0x007b:0x007b,\n0x007c:0x007c,\n0x007d:0x007d,\n0x007e:0x007e,\n0x007f:0x007f,\n0x00a0:0x00ff,\n0x00a1:0x00ad,\n0x00a2:0x00bd,\n0x00a3:0x009c,\n0x00a4:0x00cf,\n0x00a5:0x00be,\n0x00a6:0x00dd,\n0x00a7:0x00f5,\n0x00a8:0x00f9,\n0x00a9:0x00b8,\n0x00aa:0x00d1,\n0x00ab:0x00ae,\n0x00ac:0x00aa,\n0x00ad:0x00f0,\n0x00ae:0x00a9,\n0x00af:0x00ee,\n0x00b0:0x00f8,\n0x00b1:0x00f1,\n0x00b2:0x00fd,\n0x00b3:0x00fc,\n0x00b4:0x00ef,\n0x00b5:0x00e6,\n0x00b6:0x00f4,\n0x00b7:0x00fa,\n0x00b8:0x00f7,\n0x00b9:0x00fb,\n0x00ba:0x00d0,\n0x00bb:0x00af,\n0x00bc:0x00ac,\n0x00bd:0x00ab,\n0x00be:0x00f3,\n0x00bf:0x00a8,\n0x00c0:0x00b7,\n0x00c1:0x00b5,\n0x00c2:0x00b6,\n0x00c3:0x00c7,\n0x00c4:0x008e,\n0x00c5:0x008f,\n0x00c6:0x0092,\n0x00c7:0x0080,\n0x00c8:0x00d4,\n0x00c9:0x0090,\n0x00ca:0x00d2,\n0x00cb:0x00d3,\n0x00cc:0x00de,\n0x00cd:0x00d6,\n0x00ce:0x00d7,\n0x00cf:0x00d8,\n0x00d1:0x00a5,\n0x00d2:0x00e3,\n0x00d3:0x00e0,\n0x00d4:0x00e2,\n0x00d5:0x00e5,\n0x00d6:0x0099,\n0x00d7:0x00e8,\n0x00d8:0x009d,\n0x00d9:0x00eb,\n0x00da:0x00e9,\n0x00db:0x00ea,\n0x00dc:0x009a,\n0x00df:0x00e1,\n0x00e0:0x0085,\n0x00e1:0x00a0,\n0x00e2:0x0083,\n0x00e3:0x00c6,\n0x00e4:0x0084,\n0x00e5:0x0086,\n0x00e6:0x0091,\n0x00e7:0x0087,\n0x00e8:0x008a,\n0x00e9:0x0082,\n0x00ea:0x0088,\n0x00eb:0x0089,\n0x00ec:0x00ec,\n0x00ed:0x00a1,\n0x00ee:0x008c,\n0x00ef:0x008b,\n0x00f1:0x00a4,\n0x00f2:0x0095,\n0x00f3:0x00a2,\n0x00f4:0x0093,\n0x00f5:0x00e4,\n0x00f6:0x0094,\n0x00f7:0x00f6,\n0x00f8:0x009b,\n0x00f9:0x0097,\n0x00fa:0x00a3,\n0x00fb:0x0096,\n0x00fc:0x0081,\n0x00ff:0x00ed,\n0x011e:0x00a6,\n0x011f:0x00a7,\n0x0130:0x0098,\n0x0131:0x008d,\n0x015e:0x009e,\n0x015f:0x009f,\n0x2500:0x00c4,\n0x2502:0x00b3,\n0x250c:0x00da,\n0x2510:0x00bf,\n0x2514:0x00c0,\n0x2518:0x00d9,\n0x251c:0x00c3,\n0x2524:0x00b4,\n0x252c:0x00c2,\n0x2534:0x00c1,\n0x253c:0x00c5,\n0x2550:0x00cd,\n0x2551:0x00ba,\n0x2554:0x00c9,\n0x2557:0x00bb,\n0x255a:0x00c8,\n0x255d:0x00bc,\n0x2560:0x00cc,\n0x2563:0x00b9,\n0x2566:0x00cb,\n0x2569:0x00ca,\n0x256c:0x00ce,\n0x2580:0x00df,\n0x2584:0x00dc,\n0x2588:0x00db,\n0x2591:0x00b0,\n0x2592:0x00b1,\n0x2593:0x00b2,\n0x25a0:0x00fe,\n}\n"], "encodings.iso8859_7": [".py", "''\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_table)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False ):\n return codecs.charmap_encode(input,self.errors,encoding_table)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False ):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='iso8859-7',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n \n 
\ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' '\n'!'\n'\"'\n'#'\n'$'\n'%'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\x80'\n'\\x81'\n'\\x82'\n'\\x83'\n'\\x84'\n'\\x85'\n'\\x86'\n'\\x87'\n'\\x88'\n'\\x89'\n'\\x8a'\n'\\x8b'\n'\\x8c'\n'\\x8d'\n'\\x8e'\n'\\x8f'\n'\\x90'\n'\\x91'\n'\\x92'\n'\\x93'\n'\\x94'\n'\\x95'\n'\\x96'\n'\\x97'\n'\\x98'\n'\\x99'\n'\\x9a'\n'\\x9b'\n'\\x9c'\n'\\x9d'\n'\\x9e'\n'\\x9f'\n'\\xa0'\n'\\u2018'\n'\\u2019'\n'\\xa3'\n'\\u20ac'\n'\\u20af'\n'\\xa6'\n'\\xa7'\n'\\xa8'\n'\\xa9'\n'\\u037a'\n'\\xab'\n'\\xac'\n'\\xad'\n'\\ufffe'\n'\\u2015'\n'\\xb0'\n'\\xb1'\n'\\xb2'\n'\\xb3'\n'\\u0384'\n'\\u0385'\n'\\u0386'\n'\\xb7'\n'\\u0388'\n'\\u0389'\n'\\u038a'\n'\\xbb'\n'\\u038c'\n'\\xbd'\n'\\u038e'\n'\\u038f'\n'\\u0390'\n'\\u0391'\n'\\u0392'\n'\\u0393'\n'\\u0394'\n'\\u0395'\n'\\u0396'\n'\\u0397'\n'\\u0398'\n'\\u0399'\n'\\u039a'\n'\\u039b'\n'\\u039c'\n'\\u039d'\n'\\u039e'\n'\\u039f'\n'\\u03a0'\n'\\u03a1'\n'\\ufffe'\n'\\u03a3'\n'\\u03a4'\n'\\u03a5'\n'\\u03a6'\n'\\u03a7'\n'\\u03a8'\n'\\u03a9'\n'\\u03aa'\n'\\u03ab'\n'\\u03ac'\n'\\u03ad'\n'\\u03ae'\n'\\u03af'\n'\\u03b0'\n'\\u03b1'\n'\\u03b2'\n'\\u03b3'\n'\\u03b4'\n'\\u03b5'\n'\\u03b6'\n'\\u03b7'\n'\\u03b8'\n'\\u03b9'\n'\\u03ba'\n'\\u03bb'\n'\\u03bc'\n'\\u03bd'\n'\\u03be'\n'\\u03bf'\n'\\u03c0'\n'\\u03c1'\n'\\u03c2'\n'\\u03c3'\n'\\u03c4'\n'\\u03c5'\n'\\u03c6'\n'\\u03c7'\n'\\u03c8'\n'\\u03c9'\n'\\u03ca'\n'\\u03cb'\n'\\u03cc'\n'\\u03cd'\n'\\u03ce'\n'\\ufffe'\n)\n\n\nencoding_table=codecs.charmap_build(decoding_table)\n"], "struct": [".py", "__all__=[\n\n'calcsize','pack','pack_into','unpack','unpack_from',\n\n\n'Struct',\n\n\n'error'\n]\n\nfrom _struct import *\nfrom _struct import _clearcache\nfrom _struct import __doc__\n"], "tarfile": [".py", "#!/usr/bin/env python3\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n''\n\n\nversion=\"0.9.0\"\n__author__=\"Lars Gust\\u00e4bel (lars@gustaebel.de)\"\n__date__=\"$Date: 2011-02-25 17:42:01 +0200 (Fri, 25 Feb 2011) $\"\n__cvsid__=\"$Id: tarfile.py 88586 2011-02-25 15:42:01Z marc-andre.lemburg $\"\n__credits__=\"Gustavo Niemeyer, Niels Gust\\u00e4bel, Richard Townsend.\"\n\n\n\n\nimport sys\nimport os\nimport io\nimport shutil\nimport stat\nimport time\nimport struct\nimport copy\nimport re\n\ntry :\n import grp,pwd\nexcept ImportError:\n grp=pwd=None\n \n \nsymlink_exception=(AttributeError,NotImplementedError)\ntry :\n\n\n symlink_exception +=(WindowsError,)\nexcept NameError:\n pass\n \n \n__all__=[\"TarFile\",\"TarInfo\",\"is_tarfile\",\"TarError\"]\n\nfrom builtins import open as _open\n\n\n\n\nNUL=b\"\\0\"\nBLOCKSIZE=512\nRECORDSIZE=BLOCKSIZE *20\nGNU_MAGIC=b\"ustar 
\\0\"\nPOSIX_MAGIC=b\"ustar\\x0000\"\n\nLENGTH_NAME=100\nLENGTH_LINK=100\nLENGTH_PREFIX=155\n\nREGTYPE=b\"0\"\nAREGTYPE=b\"\\0\"\nLNKTYPE=b\"1\"\nSYMTYPE=b\"2\"\nCHRTYPE=b\"3\"\nBLKTYPE=b\"4\"\nDIRTYPE=b\"5\"\nFIFOTYPE=b\"6\"\nCONTTYPE=b\"7\"\n\nGNUTYPE_LONGNAME=b\"L\"\nGNUTYPE_LONGLINK=b\"K\"\nGNUTYPE_SPARSE=b\"S\"\n\nXHDTYPE=b\"x\"\nXGLTYPE=b\"g\"\nSOLARIS_XHDTYPE=b\"X\"\n\nUSTAR_FORMAT=0\nGNU_FORMAT=1\nPAX_FORMAT=2\nDEFAULT_FORMAT=GNU_FORMAT\n\n\n\n\n\nSUPPORTED_TYPES=(REGTYPE,AREGTYPE,LNKTYPE,\nSYMTYPE,DIRTYPE,FIFOTYPE,\nCONTTYPE,CHRTYPE,BLKTYPE,\nGNUTYPE_LONGNAME,GNUTYPE_LONGLINK,\nGNUTYPE_SPARSE)\n\n\nREGULAR_TYPES=(REGTYPE,AREGTYPE,\nCONTTYPE,GNUTYPE_SPARSE)\n\n\nGNU_TYPES=(GNUTYPE_LONGNAME,GNUTYPE_LONGLINK,\nGNUTYPE_SPARSE)\n\n\nPAX_FIELDS=(\"path\",\"linkpath\",\"size\",\"mtime\",\n\"uid\",\"gid\",\"uname\",\"gname\")\n\n\nPAX_NAME_FIELDS={\"path\",\"linkpath\",\"uname\",\"gname\"}\n\n\n\nPAX_NUMBER_FIELDS={\n\"atime\":float,\n\"ctime\":float,\n\"mtime\":float,\n\"uid\":int,\n\"gid\":int,\n\"size\":int\n}\n\n\n\n\nS_IFLNK=0o120000\nS_IFREG=0o100000\nS_IFBLK=0o060000\nS_IFDIR=0o040000\nS_IFCHR=0o020000\nS_IFIFO=0o010000\n\nTSUID=0o4000\nTSGID=0o2000\nTSVTX=0o1000\n\nTUREAD=0o400\nTUWRITE=0o200\nTUEXEC=0o100\nTGREAD=0o040\nTGWRITE=0o020\nTGEXEC=0o010\nTOREAD=0o004\nTOWRITE=0o002\nTOEXEC=0o001\n\n\n\n\nif os.name in (\"nt\",\"ce\"):\n ENCODING=\"utf-8\"\nelse :\n ENCODING=sys.getfilesystemencoding()\n \n \n \n \n \ndef stn(s,length,encoding,errors):\n ''\n \n s=s.encode(encoding,errors)\n return s[:length]+(length -len(s))*NUL\n \ndef nts(s,encoding,errors):\n ''\n \n p=s.find(b\"\\0\")\n if p !=-1:\n s=s[:p]\n return s.decode(encoding,errors)\n \ndef nti(s):\n ''\n \n \n \n if s[0]in (0o200,0o377):\n n=0\n for i in range(len(s)-1):\n n <<=8\n n +=s[i+1]\n if s[0]==0o377:\n n=-(256 **(len(s)-1)-n)\n else :\n try :\n n=int(nts(s,\"ascii\",\"strict\")or\"0\",8)\n except ValueError:\n raise InvalidHeaderError(\"invalid header\")\n return n\n \ndef itn(n,digits=8,format=DEFAULT_FORMAT):\n ''\n \n \n \n \n \n \n \n \n \n if 0 <=n <8 **(digits -1):\n s=bytes(\"%0*o\"%(digits -1,n),\"ascii\")+NUL\n elif format ==GNU_FORMAT and -256 **(digits -1)<=n <256 **(digits -1):\n if n >=0:\n s=bytearray([0o200])\n else :\n s=bytearray([0o377])\n n=256 **digits+n\n \n for i in range(digits -1):\n s.insert(1,n&0o377)\n n >>=8\n else :\n raise ValueError(\"overflow in number field\")\n \n return s\n \ndef calc_chksums(buf):\n ''\n\n\n\n\n\n\n \n unsigned_chksum=256+sum(struct.unpack_from(\"148B8x356B\",buf))\n signed_chksum=256+sum(struct.unpack_from(\"148b8x356b\",buf))\n return unsigned_chksum,signed_chksum\n \ndef copyfileobj(src,dst,length=None ):\n ''\n\n \n if length ==0:\n return\n if length is None :\n shutil.copyfileobj(src,dst)\n return\n \n BUFSIZE=16 *1024\n blocks,remainder=divmod(length,BUFSIZE)\n for b in range(blocks):\n buf=src.read(BUFSIZE)\n if len(buf)self.bufsize:\n self.fileobj.write(self.buf[:self.bufsize])\n self.buf=self.buf[self.bufsize:]\n \n def close(self):\n ''\n\n \n if self.closed:\n return\n \n if self.mode ==\"w\"and self.comptype !=\"tar\":\n self.buf +=self.cmp.flush()\n \n if self.mode ==\"w\"and self.buf:\n self.fileobj.write(self.buf)\n self.buf=b\"\"\n if self.comptype ==\"gz\":\n \n \n \n \n \n \n self.fileobj.write(struct.pack(\"=0:\n blocks,remainder=divmod(pos -self.pos,self.bufsize)\n for i in range(blocks):\n self.read(self.bufsize)\n self.read(remainder)\n else :\n raise StreamError(\"seeking backwards is not allowed\")\n return self.pos\n \n def 
read(self,size=None ):\n ''\n\n\n \n if size is None :\n t=[]\n while True :\n buf=self._read(self.bufsize)\n if not buf:\n break\n t.append(buf)\n buf=\"\".join(t)\n else :\n buf=self._read(size)\n self.pos +=len(buf)\n return buf\n \n def _read(self,size):\n ''\n \n if self.comptype ==\"tar\":\n return self.__read(size)\n \n c=len(self.dbuf)\n while c lastpos:\n self.map.append((False ,lastpos,offset,None ))\n self.map.append((True ,offset,offset+size,realpos))\n realpos +=size\n lastpos=offset+size\n if lastpos 0:\n while True :\n data,start,stop,offset=self.map[self.map_index]\n if start <=self.position \"%(self.__class__.__name__,self.name,id(self))\n \n def get_info(self):\n ''\n \n info={\n \"name\":self.name,\n \"mode\":self.mode&0o7777,\n \"uid\":self.uid,\n \"gid\":self.gid,\n \"size\":self.size,\n \"mtime\":self.mtime,\n \"chksum\":self.chksum,\n \"type\":self.type,\n \"linkname\":self.linkname,\n \"uname\":self.uname,\n \"gname\":self.gname,\n \"devmajor\":self.devmajor,\n \"devminor\":self.devminor\n }\n \n if info[\"type\"]==DIRTYPE and not info[\"name\"].endswith(\"/\"):\n info[\"name\"]+=\"/\"\n \n return info\n \n def tobuf(self,format=DEFAULT_FORMAT,encoding=ENCODING,errors=\"surrogateescape\"):\n ''\n \n info=self.get_info()\n \n if format ==USTAR_FORMAT:\n return self.create_ustar_header(info,encoding,errors)\n elif format ==GNU_FORMAT:\n return self.create_gnu_header(info,encoding,errors)\n elif format ==PAX_FORMAT:\n return self.create_pax_header(info,encoding)\n else :\n raise ValueError(\"invalid format\")\n \n def create_ustar_header(self,info,encoding,errors):\n ''\n \n info[\"magic\"]=POSIX_MAGIC\n \n if len(info[\"linkname\"])>LENGTH_LINK:\n raise ValueError(\"linkname is too long\")\n \n if len(info[\"name\"])>LENGTH_NAME:\n info[\"prefix\"],info[\"name\"]=self._posix_split_name(info[\"name\"])\n \n return self._create_header(info,USTAR_FORMAT,encoding,errors)\n \n def create_gnu_header(self,info,encoding,errors):\n ''\n \n info[\"magic\"]=GNU_MAGIC\n \n buf=b\"\"\n if len(info[\"linkname\"])>LENGTH_LINK:\n buf +=self._create_gnu_long_header(info[\"linkname\"],GNUTYPE_LONGLINK,encoding,errors)\n \n if len(info[\"name\"])>LENGTH_NAME:\n buf +=self._create_gnu_long_header(info[\"name\"],GNUTYPE_LONGNAME,encoding,errors)\n \n return buf+self._create_header(info,GNU_FORMAT,encoding,errors)\n \n def create_pax_header(self,info,encoding):\n ''\n\n\n \n info[\"magic\"]=POSIX_MAGIC\n pax_headers=self.pax_headers.copy()\n \n \n \n for name,hname,length in (\n (\"name\",\"path\",LENGTH_NAME),(\"linkname\",\"linkpath\",LENGTH_LINK),\n (\"uname\",\"uname\",32),(\"gname\",\"gname\",32)):\n \n if hname in pax_headers:\n \n continue\n \n \n try :\n info[name].encode(\"ascii\",\"strict\")\n except UnicodeEncodeError:\n pax_headers[hname]=info[name]\n continue\n \n if len(info[name])>length:\n pax_headers[hname]=info[name]\n \n \n \n for name,digits in ((\"uid\",8),(\"gid\",8),(\"size\",12),(\"mtime\",12)):\n if name in pax_headers:\n \n info[name]=0\n continue\n \n val=info[name]\n if not 0 <=val <8 **(digits -1)or isinstance(val,float):\n pax_headers[name]=str(val)\n info[name]=0\n \n \n if pax_headers:\n buf=self._create_pax_generic_header(pax_headers,XHDTYPE,encoding)\n else :\n buf=b\"\"\n \n return buf+self._create_header(info,USTAR_FORMAT,\"ascii\",\"replace\")\n \n @classmethod\n def create_pax_global_header(cls,pax_headers):\n ''\n \n return cls._create_pax_generic_header(pax_headers,XGLTYPE,\"utf-8\")\n \n def _posix_split_name(self,name):\n ''\n\n \n 
prefix=name[:LENGTH_PREFIX+1]\n while prefix and prefix[-1]!=\"/\":\n prefix=prefix[:-1]\n \n name=name[len(prefix):]\n prefix=prefix[:-1]\n \n if not prefix or len(name)>LENGTH_NAME:\n raise ValueError(\"name is too long\")\n return prefix,name\n \n @staticmethod\n def _create_header(info,format,encoding,errors):\n ''\n\n \n parts=[\n stn(info.get(\"name\",\"\"),100,encoding,errors),\n itn(info.get(\"mode\",0)&0o7777,8,format),\n itn(info.get(\"uid\",0),8,format),\n itn(info.get(\"gid\",0),8,format),\n itn(info.get(\"size\",0),12,format),\n itn(info.get(\"mtime\",0),12,format),\n b\" \",\n info.get(\"type\",REGTYPE),\n stn(info.get(\"linkname\",\"\"),100,encoding,errors),\n info.get(\"magic\",POSIX_MAGIC),\n stn(info.get(\"uname\",\"\"),32,encoding,errors),\n stn(info.get(\"gname\",\"\"),32,encoding,errors),\n itn(info.get(\"devmajor\",0),8,format),\n itn(info.get(\"devminor\",0),8,format),\n stn(info.get(\"prefix\",\"\"),155,encoding,errors)\n ]\n \n buf=struct.pack(\"%ds\"%BLOCKSIZE,b\"\".join(parts))\n chksum=calc_chksums(buf[-BLOCKSIZE:])[0]\n buf=buf[:-364]+bytes(\"%06o\\0\"%chksum,\"ascii\")+buf[-357:]\n return buf\n \n @staticmethod\n def _create_payload(payload):\n ''\n\n \n blocks,remainder=divmod(len(payload),BLOCKSIZE)\n if remainder >0:\n payload +=(BLOCKSIZE -remainder)*NUL\n return payload\n \n @classmethod\n def _create_gnu_long_header(cls,name,type,encoding,errors):\n ''\n\n \n name=name.encode(encoding,errors)+NUL\n \n info={}\n info[\"name\"]=\"././@LongLink\"\n info[\"type\"]=type\n info[\"size\"]=len(name)\n info[\"magic\"]=GNU_MAGIC\n \n \n return cls._create_header(info,USTAR_FORMAT,encoding,errors)+ cls._create_payload(name)\n \n @classmethod\n def _create_pax_generic_header(cls,pax_headers,type,encoding):\n ''\n\n\n \n \n \n binary=False\n for keyword,value in pax_headers.items():\n try :\n value.encode(\"utf-8\",\"strict\")\n except UnicodeEncodeError:\n binary=True\n break\n \n records=b\"\"\n if binary:\n \n records +=b\"21 hdrcharset=BINARY\\n\"\n \n for keyword,value in pax_headers.items():\n keyword=keyword.encode(\"utf-8\")\n if binary:\n \n \n value=value.encode(encoding,\"surrogateescape\")\n else :\n value=value.encode(\"utf-8\")\n \n l=len(keyword)+len(value)+3\n n=p=0\n while True :\n n=l+len(str(p))\n if n ==p:\n break\n p=n\n records +=bytes(str(p),\"ascii\")+b\" \"+keyword+b\"=\"+value+b\"\\n\"\n \n \n \n info={}\n info[\"name\"]=\"././@PaxHeader\"\n info[\"type\"]=type\n info[\"size\"]=len(records)\n info[\"magic\"]=POSIX_MAGIC\n \n \n return cls._create_header(info,USTAR_FORMAT,\"ascii\",\"replace\")+ cls._create_payload(records)\n \n @classmethod\n def frombuf(cls,buf,encoding,errors):\n ''\n \n if len(buf)==0:\n raise EmptyHeaderError(\"empty header\")\n if len(buf)!=BLOCKSIZE:\n raise TruncatedHeaderError(\"truncated header\")\n if buf.count(NUL)==BLOCKSIZE:\n raise EOFHeaderError(\"end of file header\")\n \n chksum=nti(buf[148:156])\n if chksum not in calc_chksums(buf):\n raise InvalidHeaderError(\"bad checksum\")\n \n obj=cls()\n obj.name=nts(buf[0:100],encoding,errors)\n obj.mode=nti(buf[100:108])\n obj.uid=nti(buf[108:116])\n obj.gid=nti(buf[116:124])\n obj.size=nti(buf[124:136])\n obj.mtime=nti(buf[136:148])\n obj.chksum=chksum\n obj.type=buf[156:157]\n obj.linkname=nts(buf[157:257],encoding,errors)\n obj.uname=nts(buf[265:297],encoding,errors)\n obj.gname=nts(buf[297:329],encoding,errors)\n obj.devmajor=nti(buf[329:337])\n obj.devminor=nti(buf[337:345])\n prefix=nts(buf[345:500],encoding,errors)\n \n \n \n if obj.type ==AREGTYPE and 
obj.name.endswith(\"/\"):\n obj.type=DIRTYPE\n \n \n \n \n if obj.type ==GNUTYPE_SPARSE:\n pos=386\n structs=[]\n for i in range(4):\n try :\n offset=nti(buf[pos:pos+12])\n numbytes=nti(buf[pos+12:pos+24])\n except ValueError:\n break\n structs.append((offset,numbytes))\n pos +=24\n isextended=bool(buf[482])\n origsize=nti(buf[483:495])\n obj._sparse_structs=(structs,isextended,origsize)\n \n \n if obj.isdir():\n obj.name=obj.name.rstrip(\"/\")\n \n \n if prefix and obj.type not in GNU_TYPES:\n obj.name=prefix+\"/\"+obj.name\n return obj\n \n @classmethod\n def fromtarfile(cls,tarfile):\n ''\n\n \n buf=tarfile.fileobj.read(BLOCKSIZE)\n obj=cls.frombuf(buf,tarfile.encoding,tarfile.errors)\n obj.offset=tarfile.fileobj.tell()-BLOCKSIZE\n return obj._proc_member(tarfile)\n \n \n \n \n \n \n \n \n \n \n \n \n def _proc_member(self,tarfile):\n ''\n\n \n if self.type in (GNUTYPE_LONGNAME,GNUTYPE_LONGLINK):\n return self._proc_gnulong(tarfile)\n elif self.type ==GNUTYPE_SPARSE:\n return self._proc_sparse(tarfile)\n elif self.type in (XHDTYPE,XGLTYPE,SOLARIS_XHDTYPE):\n return self._proc_pax(tarfile)\n else :\n return self._proc_builtin(tarfile)\n \n def _proc_builtin(self,tarfile):\n ''\n\n \n self.offset_data=tarfile.fileobj.tell()\n offset=self.offset_data\n if self.isreg()or self.type not in SUPPORTED_TYPES:\n \n offset +=self._block(self.size)\n tarfile.offset=offset\n \n \n \n self._apply_pax_info(tarfile.pax_headers,tarfile.encoding,tarfile.errors)\n \n return self\n \n def _proc_gnulong(self,tarfile):\n ''\n\n \n buf=tarfile.fileobj.read(self._block(self.size))\n \n \n try :\n next=self.fromtarfile(tarfile)\n except HeaderError:\n raise SubsequentHeaderError(\"missing or bad subsequent header\")\n \n \n \n next.offset=self.offset\n if self.type ==GNUTYPE_LONGNAME:\n next.name=nts(buf,tarfile.encoding,tarfile.errors)\n elif self.type ==GNUTYPE_LONGLINK:\n next.linkname=nts(buf,tarfile.encoding,tarfile.errors)\n \n return next\n \n def _proc_sparse(self,tarfile):\n ''\n \n \n structs,isextended,origsize=self._sparse_structs\n del self._sparse_structs\n \n \n while isextended:\n buf=tarfile.fileobj.read(BLOCKSIZE)\n pos=0\n for i in range(21):\n try :\n offset=nti(buf[pos:pos+12])\n numbytes=nti(buf[pos+12:pos+24])\n except ValueError:\n break\n if offset and numbytes:\n structs.append((offset,numbytes))\n pos +=24\n isextended=bool(buf[504])\n self.sparse=structs\n \n self.offset_data=tarfile.fileobj.tell()\n tarfile.offset=self.offset_data+self._block(self.size)\n self.size=origsize\n return self\n \n def _proc_pax(self,tarfile):\n ''\n\n \n \n buf=tarfile.fileobj.read(self._block(self.size))\n \n \n \n \n if self.type ==XGLTYPE:\n pax_headers=tarfile.pax_headers\n else :\n pax_headers=tarfile.pax_headers.copy()\n \n \n \n \n \n \n match=re.search(br\"\\d+ hdrcharset=([^\\n]+)\\n\",buf)\n if match is not None :\n pax_headers[\"hdrcharset\"]=match.group(1).decode(\"utf-8\")\n \n \n \n \n hdrcharset=pax_headers.get(\"hdrcharset\")\n if hdrcharset ==\"BINARY\":\n encoding=tarfile.encoding\n else :\n encoding=\"utf-8\"\n \n \n \n \n \n regex=re.compile(br\"(\\d+) ([^=]+)=\")\n pos=0\n while True :\n match=regex.match(buf,pos)\n if not match:\n break\n \n length,keyword=match.groups()\n length=int(length)\n value=buf[match.end(2)+1:match.start(1)+length -1]\n \n \n \n \n \n \n \n \n keyword=self._decode_pax_field(keyword,\"utf-8\",\"utf-8\",\n tarfile.errors)\n if keyword in PAX_NAME_FIELDS:\n value=self._decode_pax_field(value,encoding,tarfile.encoding,\n tarfile.errors)\n else :\n 
value=self._decode_pax_field(value,\"utf-8\",\"utf-8\",\n tarfile.errors)\n \n pax_headers[keyword]=value\n pos +=length\n \n \n try :\n next=self.fromtarfile(tarfile)\n except HeaderError:\n raise SubsequentHeaderError(\"missing or bad subsequent header\")\n \n \n if\"GNU.sparse.map\"in pax_headers:\n \n self._proc_gnusparse_01(next,pax_headers)\n \n elif\"GNU.sparse.size\"in pax_headers:\n \n self._proc_gnusparse_00(next,pax_headers,buf)\n \n elif pax_headers.get(\"GNU.sparse.major\")==\"1\"and pax_headers.get(\"GNU.sparse.minor\")==\"0\":\n \n self._proc_gnusparse_10(next,pax_headers,tarfile)\n \n if self.type in (XHDTYPE,SOLARIS_XHDTYPE):\n \n next._apply_pax_info(pax_headers,tarfile.encoding,tarfile.errors)\n next.offset=self.offset\n \n if\"size\"in pax_headers:\n \n \n \n offset=next.offset_data\n if next.isreg()or next.type not in SUPPORTED_TYPES:\n offset +=next._block(next.size)\n tarfile.offset=offset\n \n return next\n \n def _proc_gnusparse_00(self,next,pax_headers,buf):\n ''\n \n offsets=[]\n for match in re.finditer(br\"\\d+ GNU.sparse.offset=(\\d+)\\n\",buf):\n offsets.append(int(match.group(1)))\n numbytes=[]\n for match in re.finditer(br\"\\d+ GNU.sparse.numbytes=(\\d+)\\n\",buf):\n numbytes.append(int(match.group(1)))\n next.sparse=list(zip(offsets,numbytes))\n \n def _proc_gnusparse_01(self,next,pax_headers):\n ''\n \n sparse=[int(x)for x in pax_headers[\"GNU.sparse.map\"].split(\",\")]\n next.sparse=list(zip(sparse[::2],sparse[1::2]))\n \n def _proc_gnusparse_10(self,next,pax_headers,tarfile):\n ''\n \n fields=None\n sparse=[]\n buf=tarfile.fileobj.read(BLOCKSIZE)\n fields,buf=buf.split(b\"\\n\",1)\n fields=int(fields)\n while len(sparse)1 or mode not in\"raw\":\n raise ValueError(\"mode must be 'r', 'a' or 'w'\")\n self.mode=mode\n self._mode={\"r\":\"rb\",\"a\":\"r+b\",\"w\":\"wb\"}[mode]\n \n if not fileobj:\n if self.mode ==\"a\"and not os.path.exists(name):\n \n self.mode=\"w\"\n self._mode=\"wb\"\n fileobj=bltn_open(name,self._mode)\n self._extfileobj=False\n else :\n if name is None and hasattr(fileobj,\"name\"):\n name=fileobj.name\n if hasattr(fileobj,\"mode\"):\n self._mode=fileobj.mode\n self._extfileobj=True\n self.name=os.path.abspath(name)if name else None\n self.fileobj=fileobj\n \n \n if format is not None :\n self.format=format\n if tarinfo is not None :\n self.tarinfo=tarinfo\n if dereference is not None :\n self.dereference=dereference\n if ignore_zeros is not None :\n self.ignore_zeros=ignore_zeros\n if encoding is not None :\n self.encoding=encoding\n self.errors=errors\n \n if pax_headers is not None and self.format ==PAX_FORMAT:\n self.pax_headers=pax_headers\n else :\n self.pax_headers={}\n \n if debug is not None :\n self.debug=debug\n if errorlevel is not None :\n self.errorlevel=errorlevel\n \n \n self.closed=False\n self.members=[]\n self._loaded=False\n self.offset=self.fileobj.tell()\n \n self.inodes={}\n \n \n try :\n if self.mode ==\"r\":\n self.firstmember=None\n self.firstmember=self.next()\n \n if self.mode ==\"a\":\n \n \n while True :\n self.fileobj.seek(self.offset)\n try :\n tarinfo=self.tarinfo.fromtarfile(self)\n self.members.append(tarinfo)\n except EOFHeaderError:\n self.fileobj.seek(self.offset)\n break\n except HeaderError as e:\n raise ReadError(str(e))\n \n if self.mode in\"aw\":\n self._loaded=True\n \n if self.pax_headers:\n buf=self.tarinfo.create_pax_global_header(self.pax_headers.copy())\n self.fileobj.write(buf)\n self.offset +=len(buf)\n except :\n if not self._extfileobj:\n self.fileobj.close()\n self.closed=True\n 
raise\n \n \n \n \n \n \n \n \n \n \n \n \n @classmethod\n def open(cls,name=None ,mode=\"r\",fileobj=None ,bufsize=RECORDSIZE,**kwargs):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n if not name and not fileobj:\n raise ValueError(\"nothing to open\")\n \n if mode in (\"r\",\"r:*\"):\n \n for comptype in cls.OPEN_METH:\n func=getattr(cls,cls.OPEN_METH[comptype])\n if fileobj is not None :\n saved_pos=fileobj.tell()\n try :\n return func(name,\"r\",fileobj,**kwargs)\n except (ReadError,CompressionError)as e:\n if fileobj is not None :\n fileobj.seek(saved_pos)\n continue\n raise ReadError(\"file could not be opened successfully\")\n \n elif\":\"in mode:\n filemode,comptype=mode.split(\":\",1)\n filemode=filemode or\"r\"\n comptype=comptype or\"tar\"\n \n \n \n if comptype in cls.OPEN_METH:\n func=getattr(cls,cls.OPEN_METH[comptype])\n else :\n raise CompressionError(\"unknown compression type %r\"%comptype)\n return func(name,filemode,fileobj,**kwargs)\n \n elif\"|\"in mode:\n filemode,comptype=mode.split(\"|\",1)\n filemode=filemode or\"r\"\n comptype=comptype or\"tar\"\n \n if filemode not in\"rw\":\n raise ValueError(\"mode must be 'r' or 'w'\")\n \n stream=_Stream(name,filemode,comptype,fileobj,bufsize)\n try :\n t=cls(name,filemode,stream,**kwargs)\n except :\n stream.close()\n raise\n t._extfileobj=False\n return t\n \n elif mode in\"aw\":\n return cls.taropen(name,mode,fileobj,**kwargs)\n \n raise ValueError(\"undiscernible mode\")\n \n @classmethod\n def taropen(cls,name,mode=\"r\",fileobj=None ,**kwargs):\n ''\n \n if len(mode)>1 or mode not in\"raw\":\n raise ValueError(\"mode must be 'r', 'a' or 'w'\")\n return cls(name,mode,fileobj,**kwargs)\n \n @classmethod\n def gzopen(cls,name,mode=\"r\",fileobj=None ,compresslevel=9,**kwargs):\n ''\n\n \n if len(mode)>1 or mode not in\"rw\":\n raise ValueError(\"mode must be 'r' or 'w'\")\n \n try :\n import gzip\n gzip.GzipFile\n except (ImportError,AttributeError):\n raise CompressionError(\"gzip module is not available\")\n \n extfileobj=fileobj is not None\n try :\n fileobj=gzip.GzipFile(name,mode+\"b\",compresslevel,fileobj)\n t=cls.taropen(name,mode,fileobj,**kwargs)\n except IOError:\n if not extfileobj and fileobj is not None :\n fileobj.close()\n if fileobj is None :\n raise\n raise ReadError(\"not a gzip file\")\n except :\n if not extfileobj and fileobj is not None :\n fileobj.close()\n raise\n t._extfileobj=extfileobj\n return t\n \n @classmethod\n def bz2open(cls,name,mode=\"r\",fileobj=None ,compresslevel=9,**kwargs):\n ''\n\n \n if len(mode)>1 or mode not in\"rw\":\n raise ValueError(\"mode must be 'r' or 'w'.\")\n \n try :\n import bz2\n except ImportError:\n raise CompressionError(\"bz2 module is not available\")\n \n fileobj=bz2.BZ2File(fileobj or name,mode,\n compresslevel=compresslevel)\n \n try :\n t=cls.taropen(name,mode,fileobj,**kwargs)\n except (IOError,EOFError):\n fileobj.close()\n raise ReadError(\"not a bzip2 file\")\n t._extfileobj=False\n return t\n \n @classmethod\n def xzopen(cls,name,mode=\"r\",fileobj=None ,preset=None ,**kwargs):\n ''\n\n \n if mode not in (\"r\",\"w\"):\n raise ValueError(\"mode must be 'r' or 'w'\")\n \n try :\n import lzma\n except ImportError:\n raise CompressionError(\"lzma module is not available\")\n \n fileobj=lzma.LZMAFile(fileobj or name,mode,preset=preset)\n \n try :\n t=cls.taropen(name,mode,fileobj,**kwargs)\n except (lzma.LZMAError,EOFError):\n fileobj.close()\n raise ReadError(\"not an lzma file\")\n t._extfileobj=False\n return t\n \n \n OPEN_METH={\n 
\"tar\":\"taropen\",\n \"gz\":\"gzopen\",\n \"bz2\":\"bz2open\",\n \"xz\":\"xzopen\"\n }\n \n \n \n \n def close(self):\n ''\n\n \n if self.closed:\n return\n \n if self.mode in\"aw\":\n self.fileobj.write(NUL *(BLOCKSIZE *2))\n self.offset +=(BLOCKSIZE *2)\n \n \n blocks,remainder=divmod(self.offset,RECORDSIZE)\n if remainder >0:\n self.fileobj.write(NUL *(RECORDSIZE -remainder))\n \n if not self._extfileobj:\n self.fileobj.close()\n self.closed=True\n \n def getmember(self,name):\n ''\n\n\n\n \n tarinfo=self._getmember(name)\n if tarinfo is None :\n raise KeyError(\"filename %r not found\"%name)\n return tarinfo\n \n def getmembers(self):\n ''\n\n \n self._check()\n if not self._loaded:\n self._load()\n \n return self.members\n \n def getnames(self):\n ''\n\n \n return [tarinfo.name for tarinfo in self.getmembers()]\n \n def gettarinfo(self,name=None ,arcname=None ,fileobj=None ):\n ''\n\n\n\n\n \n self._check(\"aw\")\n \n \n \n if fileobj is not None :\n name=fileobj.name\n \n \n \n \n if arcname is None :\n arcname=name\n drv,arcname=os.path.splitdrive(arcname)\n arcname=arcname.replace(os.sep,\"/\")\n arcname=arcname.lstrip(\"/\")\n \n \n \n tarinfo=self.tarinfo()\n tarinfo.tarfile=self\n \n \n \n if fileobj is None :\n if hasattr(os,\"lstat\")and not self.dereference:\n statres=os.lstat(name)\n else :\n statres=os.stat(name)\n else :\n statres=os.fstat(fileobj.fileno())\n linkname=\"\"\n \n stmd=statres.st_mode\n if stat.S_ISREG(stmd):\n inode=(statres.st_ino,statres.st_dev)\n if not self.dereference and statres.st_nlink >1 and inode in self.inodes and arcname !=self.inodes[inode]:\n \n \n type=LNKTYPE\n linkname=self.inodes[inode]\n else :\n \n \n type=REGTYPE\n if inode[0]:\n self.inodes[inode]=arcname\n elif stat.S_ISDIR(stmd):\n type=DIRTYPE\n elif stat.S_ISFIFO(stmd):\n type=FIFOTYPE\n elif stat.S_ISLNK(stmd):\n type=SYMTYPE\n linkname=os.readlink(name)\n elif stat.S_ISCHR(stmd):\n type=CHRTYPE\n elif stat.S_ISBLK(stmd):\n type=BLKTYPE\n else :\n return None\n \n \n \n tarinfo.name=arcname\n tarinfo.mode=stmd\n tarinfo.uid=statres.st_uid\n tarinfo.gid=statres.st_gid\n if type ==REGTYPE:\n tarinfo.size=statres.st_size\n else :\n tarinfo.size=0\n tarinfo.mtime=statres.st_mtime\n tarinfo.type=type\n tarinfo.linkname=linkname\n if pwd:\n try :\n tarinfo.uname=pwd.getpwuid(tarinfo.uid)[0]\n except KeyError:\n pass\n if grp:\n try :\n tarinfo.gname=grp.getgrgid(tarinfo.gid)[0]\n except KeyError:\n pass\n \n if type in (CHRTYPE,BLKTYPE):\n if hasattr(os,\"major\")and hasattr(os,\"minor\"):\n tarinfo.devmajor=os.major(statres.st_rdev)\n tarinfo.devminor=os.minor(statres.st_rdev)\n return tarinfo\n \n def list(self,verbose=True ):\n ''\n\n\n \n self._check()\n \n for tarinfo in self:\n if verbose:\n print(stat.filemode(tarinfo.mode),end=' ')\n print(\"%s/%s\"%(tarinfo.uname or tarinfo.uid,\n tarinfo.gname or tarinfo.gid),end=' ')\n if tarinfo.ischr()or tarinfo.isblk():\n print(\"%10s\"%(\"%d,%d\" %(tarinfo.devmajor,tarinfo.devminor)),end=' ')\n else :\n print(\"%10d\"%tarinfo.size,end=' ')\n print(\"%d-%02d-%02d %02d:%02d:%02d\" %time.localtime(tarinfo.mtime)[:6],end=' ')\n \n print(tarinfo.name+(\"/\"if tarinfo.isdir()else\"\"),end=' ')\n \n if verbose:\n if tarinfo.issym():\n print(\"->\",tarinfo.linkname,end=' ')\n if tarinfo.islnk():\n print(\"link to\",tarinfo.linkname,end=' ')\n print()\n \n def add(self,name,arcname=None ,recursive=True ,exclude=None ,*,filter=None ):\n ''\n\n\n\n\n\n\n\n\n \n self._check(\"aw\")\n \n if arcname is None :\n arcname=name\n \n \n if exclude is not 
None :\n import warnings\n warnings.warn(\"use the filter argument instead\",\n DeprecationWarning,2)\n if exclude(name):\n self._dbg(2,\"tarfile: Excluded %r\"%name)\n return\n \n \n if self.name is not None and os.path.abspath(name)==self.name:\n self._dbg(2,\"tarfile: Skipped %r\"%name)\n return\n \n self._dbg(1,name)\n \n \n tarinfo=self.gettarinfo(name,arcname)\n \n if tarinfo is None :\n self._dbg(1,\"tarfile: Unsupported type %r\"%name)\n return\n \n \n if filter is not None :\n tarinfo=filter(tarinfo)\n if tarinfo is None :\n self._dbg(2,\"tarfile: Excluded %r\"%name)\n return\n \n \n if tarinfo.isreg():\n with bltn_open(name,\"rb\")as f:\n self.addfile(tarinfo,f)\n \n elif tarinfo.isdir():\n self.addfile(tarinfo)\n if recursive:\n for f in os.listdir(name):\n self.add(os.path.join(name,f),os.path.join(arcname,f),\n recursive,exclude,filter=filter)\n \n else :\n self.addfile(tarinfo)\n \n def addfile(self,tarinfo,fileobj=None ):\n ''\n\n\n\n\n \n self._check(\"aw\")\n \n tarinfo=copy.copy(tarinfo)\n \n buf=tarinfo.tobuf(self.format,self.encoding,self.errors)\n self.fileobj.write(buf)\n self.offset +=len(buf)\n \n \n if fileobj is not None :\n copyfileobj(fileobj,self.fileobj,tarinfo.size)\n blocks,remainder=divmod(tarinfo.size,BLOCKSIZE)\n if remainder >0:\n self.fileobj.write(NUL *(BLOCKSIZE -remainder))\n blocks +=1\n self.offset +=blocks *BLOCKSIZE\n \n self.members.append(tarinfo)\n \n def extractall(self,path=\".\",members=None ):\n ''\n\n\n\n\n \n directories=[]\n \n if members is None :\n members=self\n \n for tarinfo in members:\n if tarinfo.isdir():\n \n directories.append(tarinfo)\n tarinfo=copy.copy(tarinfo)\n tarinfo.mode=0o700\n \n self.extract(tarinfo,path,set_attrs=not tarinfo.isdir())\n \n \n directories.sort(key=lambda a:a.name)\n directories.reverse()\n \n \n for tarinfo in directories:\n dirpath=os.path.join(path,tarinfo.name)\n try :\n self.chown(tarinfo,dirpath)\n self.utime(tarinfo,dirpath)\n self.chmod(tarinfo,dirpath)\n except ExtractError as e:\n if self.errorlevel >1:\n raise\n else :\n self._dbg(1,\"tarfile: %s\"%e)\n \n def extract(self,member,path=\"\",set_attrs=True ):\n ''\n\n\n\n\n \n self._check(\"r\")\n \n if isinstance(member,str):\n tarinfo=self.getmember(member)\n else :\n tarinfo=member\n \n \n if tarinfo.islnk():\n tarinfo._link_target=os.path.join(path,tarinfo.linkname)\n \n try :\n self._extract_member(tarinfo,os.path.join(path,tarinfo.name),\n set_attrs=set_attrs)\n except EnvironmentError as e:\n if self.errorlevel >0:\n raise\n else :\n if e.filename is None :\n self._dbg(1,\"tarfile: %s\"%e.strerror)\n else :\n self._dbg(1,\"tarfile: %s %r\"%(e.strerror,e.filename))\n except ExtractError as e:\n if self.errorlevel >1:\n raise\n else :\n self._dbg(1,\"tarfile: %s\"%e)\n \n def extractfile(self,member):\n ''\n\n\n\n \n self._check(\"r\")\n \n if isinstance(member,str):\n tarinfo=self.getmember(member)\n else :\n tarinfo=member\n \n if tarinfo.isreg()or tarinfo.type not in SUPPORTED_TYPES:\n \n return self.fileobject(self,tarinfo)\n \n elif tarinfo.islnk()or tarinfo.issym():\n if isinstance(self.fileobj,_Stream):\n \n \n \n raise StreamError(\"cannot extract (sym)link as file object\")\n else :\n \n return self.extractfile(self._find_link_target(tarinfo))\n else :\n \n \n return None\n \n def _extract_member(self,tarinfo,targetpath,set_attrs=True ):\n ''\n\n \n \n \n \n targetpath=targetpath.rstrip(\"/\")\n targetpath=targetpath.replace(\"/\",os.sep)\n \n \n upperdirs=os.path.dirname(targetpath)\n if upperdirs and not 
os.path.exists(upperdirs):\n \n \n os.makedirs(upperdirs)\n \n if tarinfo.islnk()or tarinfo.issym():\n self._dbg(1,\"%s -> %s\"%(tarinfo.name,tarinfo.linkname))\n else :\n self._dbg(1,tarinfo.name)\n \n if tarinfo.isreg():\n self.makefile(tarinfo,targetpath)\n elif tarinfo.isdir():\n self.makedir(tarinfo,targetpath)\n elif tarinfo.isfifo():\n self.makefifo(tarinfo,targetpath)\n elif tarinfo.ischr()or tarinfo.isblk():\n self.makedev(tarinfo,targetpath)\n elif tarinfo.islnk()or tarinfo.issym():\n self.makelink(tarinfo,targetpath)\n elif tarinfo.type not in SUPPORTED_TYPES:\n self.makeunknown(tarinfo,targetpath)\n else :\n self.makefile(tarinfo,targetpath)\n \n if set_attrs:\n self.chown(tarinfo,targetpath)\n if not tarinfo.issym():\n self.chmod(tarinfo,targetpath)\n self.utime(tarinfo,targetpath)\n \n \n \n \n \n \n def makedir(self,tarinfo,targetpath):\n ''\n \n try :\n \n \n os.mkdir(targetpath,0o700)\n except FileExistsError:\n pass\n \n def makefile(self,tarinfo,targetpath):\n ''\n \n source=self.fileobj\n source.seek(tarinfo.offset_data)\n with bltn_open(targetpath,\"wb\")as target:\n if tarinfo.sparse is not None :\n for offset,size in tarinfo.sparse:\n target.seek(offset)\n copyfileobj(source,target,size)\n else :\n copyfileobj(source,target,tarinfo.size)\n target.seek(tarinfo.size)\n target.truncate()\n \n def makeunknown(self,tarinfo,targetpath):\n ''\n\n \n self.makefile(tarinfo,targetpath)\n self._dbg(1,\"tarfile: Unknown file type %r, \" \"extracted as regular file.\"%tarinfo.type)\n \n def makefifo(self,tarinfo,targetpath):\n ''\n \n if hasattr(os,\"mkfifo\"):\n os.mkfifo(targetpath)\n else :\n raise ExtractError(\"fifo not supported by system\")\n \n def makedev(self,tarinfo,targetpath):\n ''\n \n if not hasattr(os,\"mknod\")or not hasattr(os,\"makedev\"):\n raise ExtractError(\"special devices not supported by system\")\n \n mode=tarinfo.mode\n if tarinfo.isblk():\n mode |=stat.S_IFBLK\n else :\n mode |=stat.S_IFCHR\n \n os.mknod(targetpath,mode,\n os.makedev(tarinfo.devmajor,tarinfo.devminor))\n \n def makelink(self,tarinfo,targetpath):\n ''\n\n\n \n try :\n \n if tarinfo.issym():\n os.symlink(tarinfo.linkname,targetpath)\n else :\n \n if os.path.exists(tarinfo._link_target):\n os.link(tarinfo._link_target,targetpath)\n else :\n self._extract_member(self._find_link_target(tarinfo),\n targetpath)\n except symlink_exception:\n try :\n self._extract_member(self._find_link_target(tarinfo),\n targetpath)\n except KeyError:\n raise ExtractError(\"unable to resolve link inside archive\")\n \n def chown(self,tarinfo,targetpath):\n ''\n \n if pwd and hasattr(os,\"geteuid\")and os.geteuid()==0:\n \n try :\n g=grp.getgrnam(tarinfo.gname)[2]\n except KeyError:\n g=tarinfo.gid\n try :\n u=pwd.getpwnam(tarinfo.uname)[2]\n except KeyError:\n u=tarinfo.uid\n try :\n if tarinfo.issym()and hasattr(os,\"lchown\"):\n os.lchown(targetpath,u,g)\n else :\n if sys.platform !=\"os2emx\":\n os.chown(targetpath,u,g)\n except EnvironmentError as e:\n raise ExtractError(\"could not change owner\")\n \n def chmod(self,tarinfo,targetpath):\n ''\n \n if hasattr(os,'chmod'):\n try :\n os.chmod(targetpath,tarinfo.mode)\n except EnvironmentError as e:\n raise ExtractError(\"could not change mode\")\n \n def utime(self,tarinfo,targetpath):\n ''\n \n if not hasattr(os,'utime'):\n return\n try :\n os.utime(targetpath,(tarinfo.mtime,tarinfo.mtime))\n except EnvironmentError as e:\n raise ExtractError(\"could not change modification time\")\n \n \n def next(self):\n ''\n\n\n \n self._check(\"ra\")\n if 
self.firstmember is not None :\n m=self.firstmember\n self.firstmember=None\n return m\n \n \n self.fileobj.seek(self.offset)\n tarinfo=None\n while True :\n try :\n tarinfo=self.tarinfo.fromtarfile(self)\n except EOFHeaderError as e:\n if self.ignore_zeros:\n self._dbg(2,\"0x%X: %s\"%(self.offset,e))\n self.offset +=BLOCKSIZE\n continue\n except InvalidHeaderError as e:\n if self.ignore_zeros:\n self._dbg(2,\"0x%X: %s\"%(self.offset,e))\n self.offset +=BLOCKSIZE\n continue\n elif self.offset ==0:\n raise ReadError(str(e))\n except EmptyHeaderError:\n if self.offset ==0:\n raise ReadError(\"empty file\")\n except TruncatedHeaderError as e:\n if self.offset ==0:\n raise ReadError(str(e))\n except SubsequentHeaderError as e:\n raise ReadError(str(e))\n break\n \n if tarinfo is not None :\n self.members.append(tarinfo)\n else :\n self._loaded=True\n \n return tarinfo\n \n \n \n \n def _getmember(self,name,tarinfo=None ,normalize=False ):\n ''\n\n \n \n members=self.getmembers()\n \n \n if tarinfo is not None :\n members=members[:members.index(tarinfo)]\n \n if normalize:\n name=os.path.normpath(name)\n \n for member in reversed(members):\n if normalize:\n member_name=os.path.normpath(member.name)\n else :\n member_name=member.name\n \n if name ==member_name:\n return member\n \n def _load(self):\n ''\n\n \n while True :\n tarinfo=self.next()\n if tarinfo is None :\n break\n self._loaded=True\n \n def _check(self,mode=None ):\n ''\n\n \n if self.closed:\n raise IOError(\"%s is closed\"%self.__class__.__name__)\n if mode is not None and self.mode not in mode:\n raise IOError(\"bad operation for mode %r\"%self.mode)\n \n def _find_link_target(self,tarinfo):\n ''\n\n \n if tarinfo.issym():\n \n linkname=\"/\".join(filter(None ,(os.path.dirname(tarinfo.name),tarinfo.linkname)))\n limit=None\n else :\n \n \n linkname=tarinfo.linkname\n limit=tarinfo\n \n member=self._getmember(linkname,tarinfo=limit,normalize=True )\n if member is None :\n raise KeyError(\"linkname %r not found\"%linkname)\n return member\n \n def __iter__(self):\n ''\n \n if self._loaded:\n return iter(self.members)\n else :\n return TarIter(self)\n \n def _dbg(self,level,msg):\n ''\n \n if level <=self.debug:\n print(msg,file=sys.stderr)\n \n def __enter__(self):\n self._check()\n return self\n \n def __exit__(self,type,value,traceback):\n if type is None :\n self.close()\n else :\n \n \n if not self._extfileobj:\n self.fileobj.close()\n self.closed=True\n \n \nclass TarIter:\n ''\n\n\n\n \n \n def __init__(self,tarfile):\n ''\n \n self.tarfile=tarfile\n self.index=0\n def __iter__(self):\n ''\n \n return self\n def __next__(self):\n ''\n\n \n \n \n \n \n if self.index ==0 and self.tarfile.firstmember is not None :\n tarinfo=self.tarfile.next()\n elif self.index 
'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\xc4'\n'\\xc5'\n'\\xc7'\n'\\xc9'\n'\\xd1'\n'\\xd6'\n'\\xdc'\n'\\xe1'\n'\\xe0'\n'\\xe2'\n'\\xe4'\n'\\xe3'\n'\\xe5'\n'\\xe7'\n'\\xe9'\n'\\xe8'\n'\\xea'\n'\\xeb'\n'\\xed'\n'\\xec'\n'\\xee'\n'\\xef'\n'\\xf1'\n'\\xf3'\n'\\xf2'\n'\\xf4'\n'\\xf6'\n'\\xf5'\n'\\xfa'\n'\\xf9'\n'\\xfb'\n'\\xfc'\n'\\u2020'\n'\\xb0'\n'\\xa2'\n'\\xa3'\n'\\xa7'\n'\\u2022'\n'\\xb6'\n'\\xdf'\n'\\xae'\n'\\xa9'\n'\\u2122'\n'\\xb4'\n'\\xa8'\n'\\u2260'\n'\\xc6'\n'\\xd8'\n'\\u221e'\n'\\xb1'\n'\\u2264'\n'\\u2265'\n'\\xa5'\n'\\xb5'\n'\\u2202'\n'\\u2211'\n'\\u220f'\n'\\u03c0'\n'\\u222b'\n'\\xaa'\n'\\xba'\n'\\u03a9'\n'\\xe6'\n'\\xf8'\n'\\xbf'\n'\\xa1'\n'\\xac'\n'\\u221a'\n'\\u0192'\n'\\u2248'\n'\\u2206'\n'\\xab'\n'\\xbb'\n'\\u2026'\n'\\xa0'\n'\\xc0'\n'\\xc3'\n'\\xd5'\n'\\u0152'\n'\\u0153'\n'\\u2013'\n'\\u2014'\n'\\u201c'\n'\\u201d'\n'\\u2018'\n'\\u2019'\n'\\xf7'\n'\\u25ca'\n'\\xff'\n'\\u0178'\n'\\u2044'\n'\\u20ac'\n'\\u2039'\n'\\u203a'\n'\\ufb01'\n'\\ufb02'\n'\\u2021'\n'\\xb7'\n'\\u201a'\n'\\u201e'\n'\\u2030'\n'\\xc2'\n'\\xca'\n'\\xc1'\n'\\xcb'\n'\\xc8'\n'\\xcd'\n'\\xce'\n'\\xcf'\n'\\xcc'\n'\\xd3'\n'\\xd4'\n'\\uf8ff'\n'\\xd2'\n'\\xda'\n'\\xdb'\n'\\xd9'\n'\\u0131'\n'\\u02c6'\n'\\u02dc'\n'\\xaf'\n'\\u02d8'\n'\\u02d9'\n'\\u02da'\n'\\xb8'\n'\\u02dd'\n'\\u02db'\n'\\u02c7'\n)\n\n\nencoding_table=codecs.charmap_build(decoding_table)\n"], "doctest": [".py", "\n\n\n\n\n\n\n\nr\"\"\"Module doctest -- a framework for running examples in docstrings.\n\nIn simplest use, end each module M to be tested with:\n\ndef _test():\n import doctest\n doctest.testmod()\n\nif __name__ == \"__main__\":\n _test()\n\nThen running the module as a script will cause the examples in the\ndocstrings to get executed and verified:\n\npython M.py\n\nThis won't display anything unless an example fails, in which case the\nfailing example(s) and the cause(s) of the failure(s) are printed to stdout\n(why not stderr? because stderr is a lame hack <0.2 wink>), and the final\nline of output is \"Test failed.\".\n\nRun it with the -v switch instead:\n\npython M.py -v\n\nand a detailed report of all examples tried is printed to stdout, along\nwith assorted summaries at the end.\n\nYou can force verbose mode by passing \"verbose=True\" to testmod, or prohibit\nit by passing \"verbose=False\". In either of those cases, sys.argv is not\nexamined by testmod.\n\nThere are a variety of other ways to run doctests, including integration\nwith the unittest framework, and support for running non-Python text\nfiles containing doctests. There are also many ways to override parts\nof doctest's default behaviors. 
See the Library Reference Manual for\ndetails.\n\"\"\"\n\n__docformat__='reStructuredText en'\n\n__all__=[\n\n'register_optionflag',\n'DONT_ACCEPT_TRUE_FOR_1',\n'DONT_ACCEPT_BLANKLINE',\n'NORMALIZE_WHITESPACE',\n'ELLIPSIS',\n'SKIP',\n'IGNORE_EXCEPTION_DETAIL',\n'COMPARISON_FLAGS',\n'REPORT_UDIFF',\n'REPORT_CDIFF',\n'REPORT_NDIFF',\n'REPORT_ONLY_FIRST_FAILURE',\n'REPORTING_FLAGS',\n'FAIL_FAST',\n\n\n'Example',\n'DocTest',\n\n'DocTestParser',\n\n'DocTestFinder',\n\n'DocTestRunner',\n'OutputChecker',\n'DocTestFailure',\n'UnexpectedException',\n'DebugRunner',\n\n'testmod',\n'testfile',\n'run_docstring_examples',\n\n'DocTestSuite',\n'DocFileSuite',\n'set_unittest_reportflags',\n\n'script_from_examples',\n'testsource',\n'debug_src',\n'debug',\n]\n\nimport __future__\nimport argparse\nimport difflib\nimport inspect\nimport linecache\nimport os\nimport pdb\nimport re\nimport sys\nimport traceback\nimport unittest\nfrom io import StringIO\nfrom collections import namedtuple\n\nTestResults=namedtuple('TestResults','failed attempted')\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\nOPTIONFLAGS_BY_NAME={}\ndef register_optionflag(name):\n\n return OPTIONFLAGS_BY_NAME.setdefault(name,1 <=2\n \n \n startpos,endpos=0,len(got)\n w=ws[0]\n if w:\n if got.startswith(w):\n startpos=len(w)\n del ws[0]\n else :\n return False\n w=ws[-1]\n if w:\n if got.endswith(w):\n endpos -=len(w)\n del ws[-1]\n else :\n return False\n \n if startpos >endpos:\n \n \n return False\n \n \n \n \n for w in ws:\n \n \n \n startpos=got.find(w,startpos,endpos)\n if startpos <0:\n return False\n startpos +=len(w)\n \n return True\n \ndef _comment_line(line):\n ''\n line=line.rstrip()\n if line:\n return'# '+line\n else :\n return'#'\n \ndef _strip_exception_details(msg):\n\n\n\n\n\n\n\n\n\n\n start,end=0,len(msg)\n \n i=msg.find(\"\\n\")\n if i >=0:\n end=i\n \n i=msg.find(':',0,end)\n if i >=0:\n end=i\n \n i=msg.rfind('.',0,end)\n if i >=0:\n start=i+1\n return msg[start:end]\n \nclass _OutputRedirectingPdb(pdb.Pdb):\n ''\n\n\n\n \n def __init__(self,out):\n self.__out=out\n self.__debugger_used=False\n \n pdb.Pdb.__init__(self,stdout=out,nosigint=True )\n \n self.use_rawinput=1\n \n def set_trace(self,frame=None ):\n self.__debugger_used=True\n if frame is None :\n frame=sys._getframe().f_back\n pdb.Pdb.set_trace(self,frame)\n \n def set_continue(self):\n \n \n if self.__debugger_used:\n pdb.Pdb.set_continue(self)\n \n def trace_dispatch(self,*args):\n \n save_stdout=sys.stdout\n sys.stdout=self.__out\n \n try :\n return pdb.Pdb.trace_dispatch(self,*args)\n finally :\n sys.stdout=save_stdout\n \n \ndef _module_relative_path(module,path):\n if not inspect.ismodule(module):\n raise TypeError('Expected a module: %r'%module)\n if path.startswith('/'):\n raise ValueError('Module-relative files may not have absolute paths')\n \n \n if hasattr(module,'__file__'):\n \n basedir=os.path.split(module.__file__)[0]\n elif module.__name__ =='__main__':\n \n if len(sys.argv)>0 and sys.argv[0]!='':\n basedir=os.path.split(sys.argv[0])[0]\n else :\n basedir=os.curdir\n else :\n \n raise ValueError(\"Can't resolve paths relative to the module \"+\n module+\" (it has no __file__)\")\n \n \n return os.path.join(basedir,*(path.split('/')))\n \n \n \n \n \n \n \n \n \n \n \n \n \nclass Example:\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n def __init__(self,source,want,exc_msg=None ,lineno=0,indent=0,\n options=None ):\n \n if not source.endswith('\\n'):\n source +='\\n'\n if want and not want.endswith('\\n'):\n want 
+='\\n'\n if exc_msg is not None and not exc_msg.endswith('\\n'):\n exc_msg +='\\n'\n \n self.source=source\n self.want=want\n self.lineno=lineno\n self.indent=indent\n if options is None :options={}\n self.options=options\n self.exc_msg=exc_msg\n \n def __eq__(self,other):\n if type(self)is not type(other):\n return NotImplemented\n \n return self.source ==other.source and self.want ==other.want and self.lineno ==other.lineno and self.indent ==other.indent and self.options ==other.options and self.exc_msg ==other.exc_msg\n \n def __hash__(self):\n return hash((self.source,self.want,self.lineno,self.indent,\n self.exc_msg))\n \nclass DocTest:\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n def __init__(self,examples,globs,name,filename,lineno,docstring):\n ''\n\n\n \n assert not isinstance(examples,str), \"DocTest no longer accepts str; use DocTestParser instead\"\n self.examples=examples\n self.docstring=docstring\n self.globs=globs.copy()\n self.name=name\n self.filename=filename\n self.lineno=lineno\n \n def __repr__(self):\n if len(self.examples)==0:\n examples='no examples'\n elif len(self.examples)==1:\n examples='1 example'\n else :\n examples='%d examples'%len(self.examples)\n return (''%\n (self.name,self.filename,self.lineno,examples))\n \n def __eq__(self,other):\n if type(self)is not type(other):\n return NotImplemented\n \n return self.examples ==other.examples and self.docstring ==other.docstring and self.globs ==other.globs and self.name ==other.name and self.filename ==other.filename and self.lineno ==other.lineno\n \n def __hash__(self):\n return hash((self.docstring,self.name,self.filename,self.lineno))\n \n \n def __lt__(self,other):\n if not isinstance(other,DocTest):\n return NotImplemented\n return ((self.name,self.filename,self.lineno,id(self))\n <\n (other.name,other.filename,other.lineno,id(other)))\n \n \n \n \n \nclass DocTestParser:\n ''\n\n \n \n \n \n \n \n _EXAMPLE_RE=re.compile(r'''\n # Source consists of a PS1 line followed by zero or more PS2 lines.\n (?P\n (?:^(?P [ ]*) >>> .*) # PS1 line\n (?:\\n [ ]* \\.\\.\\. .*)*) # PS2 lines\n \\n?\n # Want consists of any non-blank lines that do not start with PS1.\n (?P (?:(?![ ]*$) # Not a blank line\n (?![ ]*>>>) # Not a line starting with PS1\n .+$\\n? # But any other line\n )*)\n ''',re.MULTILINE |re.VERBOSE)\n \n \n \n \n \n \n \n \n \n \n _EXCEPTION_RE=re.compile(r\"\"\"\n # Grab the traceback header. Different versions of Python have\n # said different things on the first traceback line.\n ^(?P Traceback\\ \\(\n (?: most\\ recent\\ call\\ last\n | innermost\\ last\n ) \\) :\n )\n \\s* $ # toss trailing whitespace on the header.\n (?P .*?) 
# don't blink: absorb stuff until...\n ^ (?P \\w+ .*) # a line *starts* with alphanum.\n \"\"\",re.VERBOSE |re.MULTILINE |re.DOTALL)\n \n \n \n _IS_BLANK_OR_COMMENT=re.compile(r'^[ ]*(#.*)?$').match\n \n def parse(self,string,name=''):\n ''\n\n\n\n\n\n \n string=string.expandtabs()\n \n min_indent=self._min_indent(string)\n if min_indent >0:\n string='\\n'.join([l[min_indent:]for l in string.split('\\n')])\n \n output=[]\n charno,lineno=0,0\n \n for m in self._EXAMPLE_RE.finditer(string):\n \n output.append(string[charno:m.start()])\n \n lineno +=string.count('\\n',charno,m.start())\n \n (source,options,want,exc_msg)= self._parse_example(m,name,lineno)\n \n if not self._IS_BLANK_OR_COMMENT(source):\n output.append(Example(source,want,exc_msg,\n lineno=lineno,\n indent=min_indent+len(m.group('indent')),\n options=options))\n \n lineno +=string.count('\\n',m.start(),m.end())\n \n charno=m.end()\n \n output.append(string[charno:])\n return output\n \n def get_doctest(self,string,globs,name,filename,lineno):\n ''\n\n\n\n\n\n\n \n return DocTest(self.get_examples(string,name),globs,\n name,filename,lineno,string)\n \n def get_examples(self,string,name=''):\n ''\n\n\n\n\n\n\n\n\n \n return [x for x in self.parse(string,name)\n if isinstance(x,Example)]\n \n def _parse_example(self,m,name,lineno):\n ''\n\n\n\n\n\n\n\n\n \n \n indent=len(m.group('indent'))\n \n \n \n source_lines=m.group('source').split('\\n')\n self._check_prompt_blank(source_lines,indent,name,lineno)\n self._check_prefix(source_lines[1:],' '*indent+'.',name,lineno)\n source='\\n'.join([sl[indent+4:]for sl in source_lines])\n \n \n \n \n want=m.group('want')\n want_lines=want.split('\\n')\n if len(want_lines)>1 and re.match(r' *$',want_lines[-1]):\n del want_lines[-1]\n self._check_prefix(want_lines,' '*indent,name,\n lineno+len(source_lines))\n want='\\n'.join([wl[indent:]for wl in want_lines])\n \n \n m=self._EXCEPTION_RE.match(want)\n if m:\n exc_msg=m.group('msg')\n else :\n exc_msg=None\n \n \n options=self._find_options(source,name,lineno)\n \n return source,options,want,exc_msg\n \n \n \n \n \n \n \n \n _OPTION_DIRECTIVE_RE=re.compile(r'#\\s*doctest:\\s*([^\\n\\'\"]*)$',\n re.MULTILINE)\n \n def _find_options(self,source,name,lineno):\n ''\n\n\n\n\n\n \n options={}\n \n for m in self._OPTION_DIRECTIVE_RE.finditer(source):\n option_strings=m.group(1).replace(',',' ').split()\n for option in option_strings:\n if (option[0]not in'+-'or\n option[1:]not in OPTIONFLAGS_BY_NAME):\n raise ValueError('line %r of the doctest for %s '\n 'has an invalid option: %r'%\n (lineno+1,name,option))\n flag=OPTIONFLAGS_BY_NAME[option[1:]]\n options[flag]=(option[0]=='+')\n if options and self._IS_BLANK_OR_COMMENT(source):\n raise ValueError('line %r of the doctest for %s has an option '\n 'directive on a line with no example: %r'%\n (lineno,name,source))\n return options\n \n \n \n _INDENT_RE=re.compile('^([ ]*)(?=\\S)',re.MULTILINE)\n \n def _min_indent(self,s):\n ''\n indents=[len(indent)for indent in self._INDENT_RE.findall(s)]\n if len(indents)>0:\n return min(indents)\n else :\n return 0\n \n def _check_prompt_blank(self,lines,indent,name,lineno):\n ''\n\n\n\n\n \n for i,line in enumerate(lines):\n if len(line)>=indent+4 and line[indent+3]!=' ':\n raise ValueError('line %r of the docstring for %s '\n 'lacks blank after %s: %r'%\n (lineno+i+1,name,\n line[indent:indent+3],line))\n \n def _check_prefix(self,lines,prefix,name,lineno):\n ''\n\n\n \n for i,line in enumerate(lines):\n if line and not line.startswith(prefix):\n raise 
ValueError('line %r of the docstring for %s has '\n 'inconsistent leading whitespace: %r'%\n (lineno+i+1,name,line))\n \n \n \n \n \n \nclass DocTestFinder:\n ''\n\n\n\n\n\n \n \n def __init__(self,verbose=False ,parser=DocTestParser(),\n recurse=True ,exclude_empty=True ):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n self._parser=parser\n self._verbose=verbose\n self._recurse=recurse\n self._exclude_empty=exclude_empty\n \n def find(self,obj,name=None ,module=None ,globs=None ,extraglobs=None ):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n if name is None :\n name=getattr(obj,'__name__',None )\n if name is None :\n raise ValueError(\"DocTestFinder.find: name must be given \"\n \"when obj.__name__ doesn't exist: %r\"%\n (type(obj),))\n \n \n \n \n if module is False :\n module=None\n elif module is None :\n module=inspect.getmodule(obj)\n \n \n \n \n try :\n file=inspect.getsourcefile(obj)\n except TypeError:\n source_lines=None\n else :\n if not file:\n \n \n file=inspect.getfile(obj)\n if not file[0]+file[-2:]=='<]>':file=None\n if file is None :\n source_lines=None\n else :\n if module is not None :\n \n \n \n source_lines=linecache.getlines(file,module.__dict__)\n else :\n \n \n source_lines=linecache.getlines(file)\n if not source_lines:\n source_lines=None\n \n \n if globs is None :\n if module is None :\n globs={}\n else :\n globs=module.__dict__.copy()\n else :\n globs=globs.copy()\n if extraglobs is not None :\n globs.update(extraglobs)\n if'__name__'not in globs:\n globs['__name__']='__main__'\n \n \n tests=[]\n self._find(tests,obj,name,module,source_lines,globs,{})\n \n \n \n \n tests.sort()\n return tests\n \n def _from_module(self,module,object):\n ''\n\n\n \n if module is None :\n return True\n elif inspect.getmodule(object)is not None :\n return module is inspect.getmodule(object)\n elif inspect.isfunction(object):\n return module.__dict__ is object.__globals__\n elif inspect.ismethoddescriptor(object):\n if hasattr(object,'__objclass__'):\n obj_mod=object.__objclass__.__module__\n elif hasattr(object,'__module__'):\n obj_mod=object.__module__\n else :\n return True\n return module.__name__ ==obj_mod\n elif inspect.isclass(object):\n return module.__name__ ==object.__module__\n elif hasattr(object,'__module__'):\n return module.__name__ ==object.__module__\n elif isinstance(object,property):\n return True\n else :\n raise ValueError(\"object must be a class or function\")\n \n def _find(self,tests,obj,name,module,source_lines,globs,seen):\n ''\n\n\n \n if self._verbose:\n print('Finding tests in %s'%name)\n \n \n if id(obj)in seen:\n return\n seen[id(obj)]=1\n \n \n test=self._get_test(obj,name,module,globs,source_lines)\n if test is not None :\n tests.append(test)\n \n \n if inspect.ismodule(obj)and self._recurse:\n for valname,val in obj.__dict__.items():\n valname='%s.%s'%(name,valname)\n \n if ((inspect.isroutine(val)or inspect.isclass(val))and\n self._from_module(module,val)):\n self._find(tests,val,valname,module,source_lines,\n globs,seen)\n \n \n if inspect.ismodule(obj)and self._recurse:\n for valname,val in getattr(obj,'__test__',{}).items():\n if not isinstance(valname,str):\n raise ValueError(\"DocTestFinder.find: __test__ keys \"\n \"must be strings: %r\"%\n (type(valname),))\n if not (inspect.isroutine(val)or inspect.isclass(val)or\n inspect.ismodule(val)or isinstance(val,str)):\n raise ValueError(\"DocTestFinder.find: __test__ values \"\n \"must be strings, functions, methods, \"\n \"classes, or modules: %r\"%\n (type(val),))\n 
valname='%s.__test__.%s'%(name,valname)\n self._find(tests,val,valname,module,source_lines,\n globs,seen)\n \n \n if inspect.isclass(obj)and self._recurse:\n for valname,val in obj.__dict__.items():\n \n if isinstance(val,staticmethod):\n val=getattr(obj,valname)\n if isinstance(val,classmethod):\n val=getattr(obj,valname).__func__\n \n \n if ((inspect.isroutine(val)or inspect.isclass(val)or\n isinstance(val,property))and\n self._from_module(module,val)):\n valname='%s.%s'%(name,valname)\n self._find(tests,val,valname,module,source_lines,\n globs,seen)\n \n def _get_test(self,obj,name,module,globs,source_lines):\n ''\n\n\n \n \n \n if isinstance(obj,str):\n docstring=obj\n else :\n try :\n if obj.__doc__ is None :\n docstring=''\n else :\n docstring=obj.__doc__\n if not isinstance(docstring,str):\n docstring=str(docstring)\n except (TypeError,AttributeError):\n docstring=''\n \n \n lineno=self._find_lineno(obj,source_lines)\n \n \n if self._exclude_empty and not docstring:\n return None\n \n \n if module is None :\n filename=None\n else :\n filename=getattr(module,'__file__',module.__name__)\n if filename[-4:]in (\".pyc\",\".pyo\"):\n filename=filename[:-1]\n return self._parser.get_doctest(docstring,globs,name,\n filename,lineno)\n \n def _find_lineno(self,obj,source_lines):\n ''\n\n\n \n lineno=None\n \n \n if inspect.ismodule(obj):\n lineno=0\n \n \n \n \n if inspect.isclass(obj):\n if source_lines is None :\n return None\n pat=re.compile(r'^\\s*class\\s*%s\\b'%\n getattr(obj,'__name__','-'))\n for i,line in enumerate(source_lines):\n if pat.match(line):\n lineno=i\n break\n \n \n if inspect.ismethod(obj):obj=obj.__func__\n if inspect.isfunction(obj):obj=obj.__code__\n if inspect.istraceback(obj):obj=obj.tb_frame\n if inspect.isframe(obj):obj=obj.f_code\n if inspect.iscode(obj):\n lineno=getattr(obj,'co_firstlineno',None )-1\n \n \n \n \n \n \n if lineno is not None :\n if source_lines is None :\n return lineno+1\n pat=re.compile('(^|.*:)\\s*\\w*(\"|\\')')\n for lineno in range(lineno,len(source_lines)):\n if pat.match(source_lines[lineno]):\n return lineno\n \n \n return None\n \n \n \n \n \nclass DocTestRunner:\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n \n DIVIDER=\"*\"*70\n \n def __init__(self,checker=None ,verbose=None ,optionflags=0):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n self._checker=checker or OutputChecker()\n if verbose is None :\n verbose='-v'in sys.argv\n self._verbose=verbose\n self.optionflags=optionflags\n self.original_optionflags=optionflags\n \n \n self.tries=0\n self.failures=0\n self._name2ft={}\n \n \n self._fakeout=_SpoofOut()\n \n \n \n \n \n def report_start(self,out,test,example):\n ''\n\n\n \n if self._verbose:\n if example.want:\n out('Trying:\\n'+_indent(example.source)+\n 'Expecting:\\n'+_indent(example.want))\n else :\n out('Trying:\\n'+_indent(example.source)+\n 'Expecting nothing\\n')\n \n def report_success(self,out,test,example,got):\n ''\n\n\n \n if self._verbose:\n out(\"ok\\n\")\n \n def report_failure(self,out,test,example,got):\n ''\n\n \n out(self._failure_header(test,example)+\n self._checker.output_difference(example,got,self.optionflags))\n \n def report_unexpected_exception(self,out,test,example,exc_info):\n ''\n\n \n out(self._failure_header(test,example)+\n 'Exception raised:\\n'+_indent(_exception_traceback(exc_info)))\n \n def _failure_header(self,test,example):\n out=[self.DIVIDER]\n if test.filename:\n if test.lineno is not None and example.lineno is not None 
:\n lineno=test.lineno+example.lineno+1\n else :\n lineno='?'\n out.append('File \"%s\", line %s, in %s'%\n (test.filename,lineno,test.name))\n else :\n out.append('Line %s, in %s'%(example.lineno+1,test.name))\n out.append('Failed example:')\n source=example.source\n out.append(_indent(source))\n return'\\n'.join(out)\n \n \n \n \n \n def __run(self,test,compileflags,out):\n ''\n\n\n\n\n\n\n\n \n \n failures=tries=0\n \n \n \n original_optionflags=self.optionflags\n \n SUCCESS,FAILURE,BOOM=range(3)\n \n check=self._checker.check_output\n \n \n for examplenum,example in enumerate(test.examples):\n \n \n \n quiet=(self.optionflags&REPORT_ONLY_FIRST_FAILURE and\n failures >0)\n \n \n self.optionflags=original_optionflags\n if example.options:\n for (optionflag,val)in example.options.items():\n if val:\n self.optionflags |=optionflag\n else :\n self.optionflags &=~optionflag\n \n \n if self.optionflags&SKIP:\n continue\n \n \n tries +=1\n if not quiet:\n self.report_start(out,test,example)\n \n \n \n \n filename=''%(test.name,examplenum)\n \n \n \n \n try :\n \n exec(compile(example.source,filename,\"single\",\n compileflags,1),test.globs)\n self.debugger.set_continue()\n exception=None\n except KeyboardInterrupt:\n raise\n except :\n exception=sys.exc_info()\n self.debugger.set_continue()\n \n got=self._fakeout.getvalue()\n self._fakeout.truncate(0)\n outcome=FAILURE\n \n \n \n if exception is None :\n if check(example.want,got,self.optionflags):\n outcome=SUCCESS\n \n \n else :\n exc_msg=traceback.format_exception_only(*exception[:2])[-1]\n if not quiet:\n got +=_exception_traceback(exception)\n \n \n \n if example.exc_msg is None :\n outcome=BOOM\n \n \n elif check(example.exc_msg,exc_msg,self.optionflags):\n outcome=SUCCESS\n \n \n elif self.optionflags&IGNORE_EXCEPTION_DETAIL:\n if check(_strip_exception_details(example.exc_msg),\n _strip_exception_details(exc_msg),\n self.optionflags):\n outcome=SUCCESS\n \n \n if outcome is SUCCESS:\n if not quiet:\n self.report_success(out,test,example,got)\n elif outcome is FAILURE:\n if not quiet:\n self.report_failure(out,test,example,got)\n failures +=1\n elif outcome is BOOM:\n if not quiet:\n self.report_unexpected_exception(out,test,example,\n exception)\n failures +=1\n else :\n assert False ,(\"unknown outcome\",outcome)\n \n if failures and self.optionflags&FAIL_FAST:\n break\n \n \n self.optionflags=original_optionflags\n \n \n self.__record_outcome(test,failures,tries)\n return TestResults(failures,tries)\n \n def __record_outcome(self,test,f,t):\n ''\n\n\n \n f2,t2=self._name2ft.get(test.name,(0,0))\n self._name2ft[test.name]=(f+f2,t+t2)\n self.failures +=f\n self.tries +=t\n \n __LINECACHE_FILENAME_RE=re.compile(r'.+)'\n r'\\[(?P\\d+)\\]>$')\n def __patched_linecache_getlines(self,filename,module_globals=None ):\n m=self.__LINECACHE_FILENAME_RE.match(filename)\n if m and m.group('name')==self.test.name:\n example=self.test.examples[int(m.group('examplenum'))]\n return example.source.splitlines(keepends=True )\n else :\n return self.save_linecache_getlines(filename,module_globals)\n \n def run(self,test,compileflags=None ,out=None ,clear_globs=True ):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n self.test=test\n \n if compileflags is None :\n compileflags=_extract_future_flags(test.globs)\n \n save_stdout=sys.stdout\n if out is None :\n encoding=save_stdout.encoding\n if encoding is None or encoding.lower()=='utf-8':\n out=save_stdout.write\n else :\n \n def out(s):\n s=str(s.encode(encoding,'backslashreplace'),encoding)\n 
save_stdout.write(s)\n sys.stdout=self._fakeout\n \n \n \n \n \n \n save_trace=sys.gettrace()\n save_set_trace=pdb.set_trace\n self.debugger=_OutputRedirectingPdb(save_stdout)\n self.debugger.reset()\n pdb.set_trace=self.debugger.set_trace\n \n \n \n self.save_linecache_getlines=linecache.getlines\n linecache.getlines=self.__patched_linecache_getlines\n \n \n save_displayhook=sys.displayhook\n sys.displayhook=sys.__displayhook__\n \n try :\n return self.__run(test,compileflags,out)\n finally :\n sys.stdout=save_stdout\n pdb.set_trace=save_set_trace\n sys.settrace(save_trace)\n linecache.getlines=self.save_linecache_getlines\n sys.displayhook=save_displayhook\n if clear_globs:\n test.globs.clear()\n import builtins\n builtins._=None\n \n \n \n \n def summarize(self,verbose=None ):\n ''\n\n\n\n\n\n\n\n\n \n if verbose is None :\n verbose=self._verbose\n notests=[]\n passed=[]\n failed=[]\n totalt=totalf=0\n for x in self._name2ft.items():\n name,(f,t)=x\n assert f <=t\n totalt +=t\n totalf +=f\n if t ==0:\n notests.append(name)\n elif f ==0:\n passed.append((name,t))\n else :\n failed.append(x)\n if verbose:\n if notests:\n print(len(notests),\"items had no tests:\")\n notests.sort()\n for thing in notests:\n print(\" \",thing)\n if passed:\n print(len(passed),\"items passed all tests:\")\n passed.sort()\n for thing,count in passed:\n print(\" %3d tests in %s\"%(count,thing))\n if failed:\n print(self.DIVIDER)\n print(len(failed),\"items had failures:\")\n failed.sort()\n for thing,(f,t)in failed:\n print(\" %3d of %3d in %s\"%(f,t,thing))\n if verbose:\n print(totalt,\"tests in\",len(self._name2ft),\"items.\")\n print(totalt -totalf,\"passed and\",totalf,\"failed.\")\n if totalf:\n print(\"***Test Failed***\",totalf,\"failures.\")\n elif verbose:\n print(\"Test passed.\")\n return TestResults(totalf,totalt)\n \n \n \n \n def merge(self,other):\n d=self._name2ft\n for name,(f,t)in other._name2ft.items():\n if name in d:\n \n \n \n \n f2,t2=d[name]\n f=f+f2\n t=t+t2\n d[name]=f,t\n \nclass OutputChecker:\n ''\n\n\n\n\n\n \n def _toAscii(self,s):\n ''\n\n \n return str(s.encode('ASCII','backslashreplace'),\"ASCII\")\n \n def check_output(self,want,got,optionflags):\n ''\n\n\n\n\n\n\n\n \n \n \n \n \n \n \n got=self._toAscii(got)\n want=self._toAscii(want)\n \n \n \n if got ==want:\n return True\n \n \n \n if not (optionflags&DONT_ACCEPT_TRUE_FOR_1):\n if (got,want)==(\"True\\n\",\"1\\n\"):\n return True\n if (got,want)==(\"False\\n\",\"0\\n\"):\n return True\n \n \n \n if not (optionflags&DONT_ACCEPT_BLANKLINE):\n \n want=re.sub('(?m)^%s\\s*?$'%re.escape(BLANKLINE_MARKER),\n '',want)\n \n \n got=re.sub('(?m)^\\s*?$','',got)\n if got ==want:\n return True\n \n \n \n \n if optionflags&NORMALIZE_WHITESPACE:\n got=' '.join(got.split())\n want=' '.join(want.split())\n if got ==want:\n return True\n \n \n \n if optionflags&ELLIPSIS:\n if _ellipsis_match(want,got):\n return True\n \n \n return False\n \n \n def _do_a_fancy_diff(self,want,got,optionflags):\n \n if not optionflags&(REPORT_UDIFF |\n REPORT_CDIFF |\n REPORT_NDIFF):\n return False\n \n \n \n \n \n \n \n \n \n \n \n if optionflags&REPORT_NDIFF:\n return True\n \n \n return want.count('\\n')>2 and got.count('\\n')>2\n \n def output_difference(self,example,got,optionflags):\n ''\n\n\n\n\n \n want=example.want\n \n \n if not (optionflags&DONT_ACCEPT_BLANKLINE):\n got=re.sub('(?m)^[ ]*(?=\\n)',BLANKLINE_MARKER,got)\n \n \n if self._do_a_fancy_diff(want,got,optionflags):\n \n want_lines=want.splitlines(keepends=True )\n 
got_lines=got.splitlines(keepends=True )\n \n if optionflags&REPORT_UDIFF:\n diff=difflib.unified_diff(want_lines,got_lines,n=2)\n diff=list(diff)[2:]\n kind='unified diff with -expected +actual'\n elif optionflags&REPORT_CDIFF:\n diff=difflib.context_diff(want_lines,got_lines,n=2)\n diff=list(diff)[2:]\n kind='context diff with expected followed by actual'\n elif optionflags&REPORT_NDIFF:\n engine=difflib.Differ(charjunk=difflib.IS_CHARACTER_JUNK)\n diff=list(engine.compare(want_lines,got_lines))\n kind='ndiff with -expected +actual'\n else :\n assert 0,'Bad diff option'\n \n diff=[line.rstrip()+'\\n'for line in diff]\n return'Differences (%s):\\n'%kind+_indent(''.join(diff))\n \n \n \n if want and got:\n return'Expected:\\n%sGot:\\n%s'%(_indent(want),_indent(got))\n elif want:\n return'Expected:\\n%sGot nothing\\n'%_indent(want)\n elif got:\n return'Expected nothing\\nGot:\\n%s'%_indent(got)\n else :\n return'Expected nothing\\nGot nothing\\n'\n \nclass DocTestFailure(Exception):\n ''\n\n\n\n\n\n\n\n\n \n def __init__(self,test,example,got):\n self.test=test\n self.example=example\n self.got=got\n \n def __str__(self):\n return str(self.test)\n \nclass UnexpectedException(Exception):\n ''\n\n\n\n\n\n\n\n\n \n def __init__(self,test,example,exc_info):\n self.test=test\n self.example=example\n self.exc_info=exc_info\n \n def __str__(self):\n return str(self.test)\n \nclass DebugRunner(DocTestRunner):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n def run(self,test,compileflags=None ,out=None ,clear_globs=True ):\n r=DocTestRunner.run(self,test,compileflags,out,False )\n if clear_globs:\n test.globs.clear()\n return r\n \n def report_unexpected_exception(self,out,test,example,exc_info):\n raise UnexpectedException(test,example,exc_info)\n \n def report_failure(self,out,test,example,got):\n raise DocTestFailure(test,example,got)\n \n \n \n \n \n \n \n \nmaster=None\n\ndef testmod(m=None ,name=None ,globs=None ,verbose=None ,\nreport=True ,optionflags=0,extraglobs=None ,\nraise_on_error=False ,exclude_empty=False ):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n global master\n \n \n if m is None :\n \n \n \n m=sys.modules.get('__main__')\n \n \n if not inspect.ismodule(m):\n raise TypeError(\"testmod: module required; %r\"%(m,))\n \n \n if name is None :\n name=m.__name__\n \n \n finder=DocTestFinder(exclude_empty=exclude_empty)\n \n if raise_on_error:\n runner=DebugRunner(verbose=verbose,optionflags=optionflags)\n else :\n runner=DocTestRunner(verbose=verbose,optionflags=optionflags)\n \n for test in finder.find(m,name,globs=globs,extraglobs=extraglobs):\n runner.run(test)\n \n if report:\n runner.summarize()\n \n if master is None :\n master=runner\n else :\n master.merge(runner)\n \n return TestResults(runner.failures,runner.tries)\n \ndef testfile(filename,module_relative=True ,name=None ,package=None ,\nglobs=None ,verbose=None ,report=True ,optionflags=0,\nextraglobs=None ,raise_on_error=False ,parser=DocTestParser(),\nencoding=None ):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n global master\n \n if package and not module_relative:\n raise ValueError(\"Package may only be specified for module-\"\n \"relative paths.\")\n \n \n 
text,filename=_load_testfile(filename,package,module_relative,\n encoding or\"utf-8\")\n \n \n if name is None :\n name=os.path.basename(filename)\n \n \n if globs is None :\n globs={}\n else :\n globs=globs.copy()\n if extraglobs is not None :\n globs.update(extraglobs)\n if'__name__'not in globs:\n globs['__name__']='__main__'\n \n if raise_on_error:\n runner=DebugRunner(verbose=verbose,optionflags=optionflags)\n else :\n runner=DocTestRunner(verbose=verbose,optionflags=optionflags)\n \n \n test=parser.get_doctest(text,globs,name,filename,0)\n runner.run(test)\n \n if report:\n runner.summarize()\n \n if master is None :\n master=runner\n else :\n master.merge(runner)\n \n return TestResults(runner.failures,runner.tries)\n \ndef run_docstring_examples(f,globs,verbose=False ,name=\"NoName\",\ncompileflags=None ,optionflags=0):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n finder=DocTestFinder(verbose=verbose,recurse=False )\n runner=DocTestRunner(verbose=verbose,optionflags=optionflags)\n for test in finder.find(f,name,globs=globs):\n runner.run(test,compileflags=compileflags)\n \n \n \n \n \n_unittest_reportflags=0\n\ndef set_unittest_reportflags(flags):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n global _unittest_reportflags\n \n if (flags&REPORTING_FLAGS)!=flags:\n raise ValueError(\"Only reporting flags allowed\",flags)\n old=_unittest_reportflags\n _unittest_reportflags=flags\n return old\n \n \nclass DocTestCase(unittest.TestCase):\n\n def __init__(self,test,optionflags=0,setUp=None ,tearDown=None ,\n checker=None ):\n \n unittest.TestCase.__init__(self)\n self._dt_optionflags=optionflags\n self._dt_checker=checker\n self._dt_test=test\n self._dt_setUp=setUp\n self._dt_tearDown=tearDown\n \n def setUp(self):\n test=self._dt_test\n \n if self._dt_setUp is not None :\n self._dt_setUp(test)\n \n def tearDown(self):\n test=self._dt_test\n \n if self._dt_tearDown is not None :\n self._dt_tearDown(test)\n \n test.globs.clear()\n \n def runTest(self):\n test=self._dt_test\n old=sys.stdout\n new=StringIO()\n optionflags=self._dt_optionflags\n \n if not (optionflags&REPORTING_FLAGS):\n \n \n optionflags |=_unittest_reportflags\n \n runner=DocTestRunner(optionflags=optionflags,\n checker=self._dt_checker,verbose=False )\n \n try :\n runner.DIVIDER=\"-\"*70\n failures,tries=runner.run(\n test,out=new.write,clear_globs=False )\n finally :\n sys.stdout=old\n \n if failures:\n raise self.failureException(self.format_failure(new.getvalue()))\n \n def format_failure(self,err):\n test=self._dt_test\n if test.lineno is None :\n lineno='unknown line number'\n else :\n lineno='%s'%test.lineno\n lname='.'.join(test.name.split('.')[-1:])\n return ('Failed doctest test for %s\\n'\n ' File \"%s\", line %s, in %s\\n\\n%s'\n %(test.name,test.filename,lineno,lname,err)\n )\n \n def debug(self):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n self.setUp()\n runner=DebugRunner(optionflags=self._dt_optionflags,\n checker=self._dt_checker,verbose=False )\n runner.run(self._dt_test,clear_globs=False )\n self.tearDown()\n \n def id(self):\n return self._dt_test.name\n \n def __eq__(self,other):\n if type(self)is not type(other):\n return NotImplemented\n \n return self._dt_test ==other._dt_test and self._dt_optionflags ==other._dt_optionflags and self._dt_setUp ==other._dt_setUp and self._dt_tearDown ==other._dt_tearDown and self._dt_checker ==other._dt_checker\n \n def __hash__(self):\n return 
hash((self._dt_optionflags,self._dt_setUp,self._dt_tearDown,\n self._dt_checker))\n \n def __repr__(self):\n name=self._dt_test.name.split('.')\n return\"%s (%s)\"%(name[-1],'.'.join(name[:-1]))\n \n __str__=__repr__\n \n def shortDescription(self):\n return\"Doctest: \"+self._dt_test.name\n \nclass SkipDocTestCase(DocTestCase):\n def __init__(self,module):\n self.module=module\n DocTestCase.__init__(self,None )\n \n def setUp(self):\n self.skipTest(\"DocTestSuite will not work with -O2 and above\")\n \n def test_skip(self):\n pass\n \n def shortDescription(self):\n return\"Skipping tests from %s\"%self.module.__name__\n \n __str__=shortDescription\n \n \nclass _DocTestSuite(unittest.TestSuite):\n\n def _removeTestAtIndex(self,index):\n pass\n \n \ndef DocTestSuite(module=None ,globs=None ,extraglobs=None ,test_finder=None ,\n**options):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n if test_finder is None :\n test_finder=DocTestFinder()\n \n module=_normalize_module(module)\n tests=test_finder.find(module,globs=globs,extraglobs=extraglobs)\n \n if not tests and sys.flags.optimize >=2:\n \n suite=_DocTestSuite()\n suite.addTest(SkipDocTestCase(module))\n return suite\n elif not tests:\n \n \n \n \n \n \n \n raise ValueError(module,\"has no docstrings\")\n \n tests.sort()\n suite=_DocTestSuite()\n \n for test in tests:\n if len(test.examples)==0:\n continue\n if not test.filename:\n filename=module.__file__\n if filename[-4:]in (\".pyc\",\".pyo\"):\n filename=filename[:-1]\n test.filename=filename\n suite.addTest(DocTestCase(test,**options))\n \n return suite\n \nclass DocFileCase(DocTestCase):\n\n def id(self):\n return'_'.join(self._dt_test.name.split('.'))\n \n def __repr__(self):\n return self._dt_test.filename\n __str__=__repr__\n \n def format_failure(self,err):\n return ('Failed doctest test for %s\\n File \"%s\", line 0\\n\\n%s'\n %(self._dt_test.name,self._dt_test.filename,err)\n )\n \ndef DocFileTest(path,module_relative=True ,package=None ,\nglobs=None ,parser=DocTestParser(),\nencoding=None ,**options):\n if globs is None :\n globs={}\n else :\n globs=globs.copy()\n \n if package and not module_relative:\n raise ValueError(\"Package may only be specified for module-\"\n \"relative paths.\")\n \n \n doc,path=_load_testfile(path,package,module_relative,\n encoding or\"utf-8\")\n \n if\"__file__\"not in globs:\n globs[\"__file__\"]=path\n \n \n name=os.path.basename(path)\n \n \n test=parser.get_doctest(doc,globs,name,path,0)\n return DocFileCase(test,**options)\n \ndef DocFileSuite(*paths,**kw):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n suite=_DocTestSuite()\n \n \n \n \n if kw.get('module_relative',True ):\n kw['package']=_normalize_module(kw.get('package'))\n \n for path in paths:\n suite.addTest(DocFileTest(path,**kw))\n \n return suite\n \n \n \n \n \ndef script_from_examples(s):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n output=[]\n for piece in DocTestParser().parse(s):\n if isinstance(piece,Example):\n \n output.append(piece.source[:-1])\n \n want=piece.want\n if want:\n output.append('# Expected:')\n output +=['## '+l for l in want.split('\\n')[:-1]]\n else :\n \n output +=[_comment_line(l)\n for l in piece.split('\\n')[:-1]]\n \n \n while output and output[-1]=='#':\n output.pop()\n while output and output[0]=='#':\n output.pop(0)\n \n \n return'\\n'.join(output)+'\\n'\n \ndef 
testsource(module,name):\n ''\n\n\n\n\n \n module=_normalize_module(module)\n tests=DocTestFinder().find(module)\n test=[t for t in tests if t.name ==name]\n if not test:\n raise ValueError(name,\"not found in tests\")\n test=test[0]\n testsrc=script_from_examples(test.docstring)\n return testsrc\n \ndef debug_src(src,pm=False ,globs=None ):\n ''\n testsrc=script_from_examples(src)\n debug_script(testsrc,pm,globs)\n \ndef debug_script(src,pm=False ,globs=None ):\n ''\n import pdb\n \n if globs:\n globs=globs.copy()\n else :\n globs={}\n \n if pm:\n try :\n exec(src,globs,globs)\n except :\n print(sys.exc_info()[1])\n p=pdb.Pdb(nosigint=True )\n p.reset()\n p.interaction(None ,sys.exc_info()[2])\n else :\n pdb.Pdb(nosigint=True ).run(\"exec(%r)\"%src,globs,globs)\n \ndef debug(module,name,pm=False ):\n ''\n\n\n\n\n \n module=_normalize_module(module)\n testsrc=testsource(module,name)\n debug_script(testsrc,pm,module.__dict__)\n \n \n \n \nclass _TestClass:\n ''\n\n\n\n\n\n\n\n\n\n\n \n \n def __init__(self,val):\n ''\n\n\n\n\n \n \n self.val=val\n \n def square(self):\n ''\n\n\n\n \n \n self.val=self.val **2\n return self\n \n def get(self):\n ''\n\n\n\n\n \n \n return self.val\n \n__test__={\"_TestClass\":_TestClass,\n\"string\":r\"\"\"\n Example of a string object, searched as-is.\n >>> x = 1; y = 2\n >>> x + y, x * y\n (3, 2)\n \"\"\",\n\n\"bool-int equivalence\":r\"\"\"\n In 2.2, boolean expressions displayed\n 0 or 1. By default, we still accept\n them. This can be disabled by passing\n DONT_ACCEPT_TRUE_FOR_1 to the new\n optionflags argument.\n >>> 4 == 4\n 1\n >>> 4 == 4\n True\n >>> 4 > 4\n 0\n >>> 4 > 4\n False\n \"\"\",\n\n\"blank lines\":r\"\"\"\n Blank lines can be marked with :\n >>> print('foo\\n\\nbar\\n')\n foo\n \n bar\n \n \"\"\",\n\n\"ellipsis\":r\"\"\"\n If the ellipsis flag is used, then '...' 
can be used to\n elide substrings in the desired output:\n >>> print(list(range(1000))) #doctest: +ELLIPSIS\n [0, 1, 2, ..., 999]\n \"\"\",\n\n\"whitespace normalization\":r\"\"\"\n If the whitespace normalization flag is used, then\n differences in whitespace are ignored.\n >>> print(list(range(30))) #doctest: +NORMALIZE_WHITESPACE\n [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14,\n 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26,\n 27, 28, 29]\n \"\"\",\n}\n\n\ndef _test():\n parser=argparse.ArgumentParser(description=\"doctest runner\")\n parser.add_argument('-v','--verbose',action='store_true',default=False ,\n help='print very verbose output for all tests')\n parser.add_argument('-o','--option',action='append',\n choices=OPTIONFLAGS_BY_NAME.keys(),default=[],\n help=('specify a doctest option flag to apply'\n ' to the test run; may be specified more'\n ' than once to apply multiple options'))\n parser.add_argument('-f','--fail-fast',action='store_true',\n help=('stop running tests after first failure (this'\n ' is a shorthand for -o FAIL_FAST, and is'\n ' in addition to any other -o options)'))\n parser.add_argument('file',nargs='+',\n help='file containing the tests to run')\n args=parser.parse_args()\n testfiles=args.file\n \n \n verbose=args.verbose\n options=0\n for option in args.option:\n options |=OPTIONFLAGS_BY_NAME[option]\n if args.fail_fast:\n options |=FAIL_FAST\n for filename in testfiles:\n if filename.endswith(\".py\"):\n \n \n \n dirname,filename=os.path.split(filename)\n sys.path.insert(0,dirname)\n m=__import__(filename[:-3])\n del sys.path[0]\n failures,_=testmod(m,verbose=verbose,optionflags=options)\n else :\n failures,_=testfile(filename,module_relative=False ,\n verbose=verbose,optionflags=options)\n if failures:\n return 1\n return 0\n \n \nif __name__ ==\"__main__\":\n sys.exit(_test())\n"], "asyncio.queues": [".py", "''\n\n__all__=['Queue','PriorityQueue','LifoQueue','JoinableQueue',\n'QueueFull','QueueEmpty']\n\nimport collections\nimport heapq\n\nfrom .import events\nfrom .import futures\nfrom .import locks\nfrom .tasks import coroutine\n\n\nclass QueueEmpty(Exception):\n ''\n\n \n pass\n \n \nclass QueueFull(Exception):\n ''\n\n \n pass\n \n \nclass Queue:\n ''\n\n\n\n\n\n\n\n\n \n \n def __init__(self,maxsize=0,*,loop=None ):\n if loop is None :\n self._loop=events.get_event_loop()\n else :\n self._loop=loop\n self._maxsize=maxsize\n \n \n self._getters=collections.deque()\n \n self._putters=collections.deque()\n self._init(maxsize)\n \n def _init(self,maxsize):\n self._queue=collections.deque()\n \n def _get(self):\n return self._queue.popleft()\n \n def _put(self,item):\n self._queue.append(item)\n \n def __repr__(self):\n return'<{} at {:#x} {}>'.format(\n type(self).__name__,id(self),self._format())\n \n def __str__(self):\n return'<{} {}>'.format(type(self).__name__,self._format())\n \n def _format(self):\n result='maxsize={!r}'.format(self._maxsize)\n if getattr(self,'_queue',None ):\n result +=' _queue={!r}'.format(list(self._queue))\n if self._getters:\n result +=' _getters[{}]'.format(len(self._getters))\n if self._putters:\n result +=' _putters[{}]'.format(len(self._putters))\n return result\n \n def _consume_done_getters(self):\n \n while self._getters and self._getters[0].done():\n self._getters.popleft()\n \n def _consume_done_putters(self):\n \n while self._putters and self._putters[0][1].done():\n self._putters.popleft()\n \n def qsize(self):\n ''\n return len(self._queue)\n \n @property\n def maxsize(self):\n ''\n return self._maxsize\n 
\n def empty(self):\n ''\n return not self._queue\n \n def full(self):\n ''\n\n\n\n \n if self._maxsize <=0:\n return False\n else :\n return self.qsize()>=self._maxsize\n \n @coroutine\n def put(self,item):\n ''\n\n\n\n\n\n \n self._consume_done_getters()\n if self._getters:\n assert not self._queue,(\n 'queue non-empty, why are getters waiting?')\n \n getter=self._getters.popleft()\n \n \n \n self._put(item)\n \n \n getter.set_result(self._get())\n \n elif self._maxsize >0 and self._maxsize <=self.qsize():\n waiter=futures.Future(loop=self._loop)\n \n self._putters.append((item,waiter))\n yield from waiter\n \n else :\n self._put(item)\n \n def put_nowait(self,item):\n ''\n\n\n \n self._consume_done_getters()\n if self._getters:\n assert not self._queue,(\n 'queue non-empty, why are getters waiting?')\n \n getter=self._getters.popleft()\n \n \n \n self._put(item)\n \n \n getter.set_result(self._get())\n \n elif self._maxsize >0 and self._maxsize <=self.qsize():\n raise QueueFull\n else :\n self._put(item)\n \n @coroutine\n def get(self):\n ''\n\n\n\n\n \n self._consume_done_putters()\n if self._putters:\n assert self.full(),'queue not full, why are putters waiting?'\n item,putter=self._putters.popleft()\n self._put(item)\n \n \n \n \n \n self._loop.call_soon(putter._set_result_unless_cancelled,None )\n \n return self._get()\n \n elif self.qsize():\n return self._get()\n else :\n waiter=futures.Future(loop=self._loop)\n \n self._getters.append(waiter)\n return (yield from waiter)\n \n def get_nowait(self):\n ''\n\n\n \n self._consume_done_putters()\n if self._putters:\n assert self.full(),'queue not full, why are putters waiting?'\n item,putter=self._putters.popleft()\n self._put(item)\n \n \n \n putter.set_result(None )\n \n return self._get()\n \n elif self.qsize():\n return self._get()\n else :\n raise QueueEmpty\n \n \nclass PriorityQueue(Queue):\n ''\n\n\n \n \n def _init(self,maxsize):\n self._queue=[]\n \n def _put(self,item,heappush=heapq.heappush):\n heappush(self._queue,item)\n \n def _get(self,heappop=heapq.heappop):\n return heappop(self._queue)\n \n \nclass LifoQueue(Queue):\n ''\n \n def _init(self,maxsize):\n self._queue=[]\n \n def _put(self,item):\n self._queue.append(item)\n \n def _get(self):\n return self._queue.pop()\n \n \nclass JoinableQueue(Queue):\n ''\n \n def __init__(self,maxsize=0,*,loop=None ):\n super().__init__(maxsize=maxsize,loop=loop)\n self._unfinished_tasks=0\n self._finished=locks.Event(loop=self._loop)\n self._finished.set()\n \n def _format(self):\n result=Queue._format(self)\n if self._unfinished_tasks:\n result +=' tasks={}'.format(self._unfinished_tasks)\n return result\n \n def _put(self,item):\n super()._put(item)\n self._unfinished_tasks +=1\n self._finished.clear()\n \n def task_done(self):\n ''\n\n\n\n\n\n\n\n\n\n\n\n \n if self._unfinished_tasks <=0:\n raise ValueError('task_done() called too many times')\n self._unfinished_tasks -=1\n if self._unfinished_tasks ==0:\n self._finished.set()\n \n @coroutine\n def join(self):\n ''\n\n\n\n\n\n \n if self._unfinished_tasks >0:\n yield from self._finished.wait()\n"], "pprint": [".py", "\n\n\n\n\n\n\n\n\n\n\"\"\"Support to pretty-print lists, tuples, & dictionaries recursively.\n\nVery simple, but useful, especially in debugging data structures.\n\nClasses\n-------\n\nPrettyPrinter()\n Handle pretty-printing operations onto a stream using a configured\n set of formatting parameters.\n\nFunctions\n---------\n\npformat()\n Format a Python object into a pretty-printed representation.\n\npprint()\n 
Pretty-print a Python object to a stream [default is sys.stdout].\n\nsaferepr()\n Generate a 'standard' repr()-like value, but protect against recursive\n data structures.\n\n\"\"\"\n\nimport sys as _sys\nfrom collections import OrderedDict as _OrderedDict\nfrom io import StringIO as _StringIO\n\n__all__=[\"pprint\",\"pformat\",\"isreadable\",\"isrecursive\",\"saferepr\",\n\"PrettyPrinter\"]\n\n\n_commajoin=\", \".join\n_id=id\n_len=len\n_type=type\n\n\ndef pprint(object,stream=None ,indent=1,width=80,depth=None ):\n ''\n printer=PrettyPrinter(\n stream=stream,indent=indent,width=width,depth=depth)\n printer.pprint(object)\n \ndef pformat(object,indent=1,width=80,depth=None ):\n ''\n return PrettyPrinter(indent=indent,width=width,depth=depth).pformat(object)\n \ndef saferepr(object):\n ''\n return _safe_repr(object,{},None ,0)[0]\n \ndef isreadable(object):\n ''\n return _safe_repr(object,{},None ,0)[1]\n \ndef isrecursive(object):\n ''\n return _safe_repr(object,{},None ,0)[2]\n \nclass _safe_key:\n ''\n\n\n\n\n\n\n \n \n __slots__=['obj']\n \n def __init__(self,obj):\n self.obj=obj\n \n def __lt__(self,other):\n try :\n rv=self.obj.__lt__(other.obj)\n except TypeError:\n rv=NotImplemented\n \n if rv is NotImplemented:\n rv=(str(type(self.obj)),id(self.obj))< (str(type(other.obj)),id(other.obj))\n return rv\n \ndef _safe_tuple(t):\n ''\n return _safe_key(t[0]),_safe_key(t[1])\n \nclass PrettyPrinter:\n def __init__(self,indent=1,width=80,depth=None ,stream=None ):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n indent=int(indent)\n width=int(width)\n assert indent >=0,\"indent must be >= 0\"\n assert depth is None or depth >0,\"depth must be > 0\"\n assert width,\"width must be != 0\"\n self._depth=depth\n self._indent_per_level=indent\n self._width=width\n if stream is not None :\n self._stream=stream\n else :\n self._stream=_sys.stdout\n \n def pprint(self,object):\n self._format(object,self._stream,0,0,{},0)\n self._stream.write(\"\\n\")\n \n def pformat(self,object):\n sio=_StringIO()\n self._format(object,sio,0,0,{},0)\n return sio.getvalue()\n \n def isrecursive(self,object):\n return self.format(object,{},0,0)[2]\n \n def isreadable(self,object):\n s,readable,recursive=self.format(object,{},0,0)\n return readable and not recursive\n \n def _format(self,object,stream,indent,allowance,context,level):\n level=level+1\n import sys\n sys.stderr.write(str(object))\n objid=_id(object)\n if objid in context:\n stream.write(_recursion(object))\n self._recursive=True\n self._readable=False\n return\n rep=self._repr(object,context,level -1)\n typ=_type(object)\n sepLines=_len(rep)>(self._width -1 -indent -allowance)\n write=stream.write\n \n if self._depth and level >self._depth:\n write(rep)\n return\n \n if sepLines:\n r=getattr(typ,\"__repr__\",None )\n if issubclass(typ,dict):\n write('{')\n if self._indent_per_level >1:\n write((self._indent_per_level -1)*' ')\n length=_len(object)\n if length:\n context[objid]=1\n indent=indent+self._indent_per_level\n if issubclass(typ,_OrderedDict):\n items=list(object.items())\n else :\n items=sorted(object.items(),key=_safe_tuple)\n key,ent=items[0]\n rep=self._repr(key,context,level)\n write(rep)\n write(': ')\n self._format(ent,stream,indent+_len(rep)+2,\n allowance+1,context,level)\n if length >1:\n for key,ent in items[1:]:\n rep=self._repr(key,context,level)\n write(',\\n%s%s: '%(' '*indent,rep))\n self._format(ent,stream,indent+_len(rep)+2,\n allowance+1,context,level)\n indent=indent -self._indent_per_level\n del context[objid]\n write('}')\n 
return\n \n if ((issubclass(typ,list)and r is list.__repr__)or\n (issubclass(typ,tuple)and r is tuple.__repr__)or\n (issubclass(typ,set)and r is set.__repr__)or\n (issubclass(typ,frozenset)and r is frozenset.__repr__)\n ):\n length=_len(object)\n if issubclass(typ,list):\n write('[')\n endchar=']'\n elif issubclass(typ,tuple):\n write('(')\n endchar=')'\n else :\n if not length:\n write(rep)\n return\n if typ is set:\n write('{')\n endchar='}'\n else :\n write(typ.__name__)\n write('({')\n endchar='})'\n indent +=len(typ.__name__)+1\n object=sorted(object,key=_safe_key)\n if self._indent_per_level >1:\n write((self._indent_per_level -1)*' ')\n if length:\n context[objid]=1\n indent=indent+self._indent_per_level\n self._format(object[0],stream,indent,allowance+1,\n context,level)\n if length >1:\n for ent in object[1:]:\n write(',\\n'+' '*indent)\n self._format(ent,stream,indent,\n allowance+1,context,level)\n indent=indent -self._indent_per_level\n del context[objid]\n if issubclass(typ,tuple)and length ==1:\n write(',')\n write(endchar)\n return\n \n write(rep)\n \n def _repr(self,object,context,level):\n repr,readable,recursive=self.format(object,context.copy(),\n self._depth,level)\n if not readable:\n self._readable=False\n if recursive:\n self._recursive=True\n return repr\n \n def format(self,object,context,maxlevels,level):\n ''\n\n\n \n return _safe_repr(object,context,maxlevels,level)\n \n \n \n \ndef _safe_repr(object,context,maxlevels,level):\n typ=_type(object)\n if typ is str:\n if'locale'not in _sys.modules:\n return repr(object),True ,False\n if\"'\"in object and'\"'not in object:\n closure='\"'\n quotes={'\"':'\\\\\"'}\n else :\n closure=\"'\"\n quotes={\"'\":\"\\\\'\"}\n qget=quotes.get\n sio=_StringIO()\n write=sio.write\n for char in object:\n if char.isalpha():\n write(char)\n else :\n write(qget(char,repr(char)[1:-1]))\n return (\"%s%s%s\"%(closure,sio.getvalue(),closure)),True ,False\n \n r=getattr(typ,\"__repr__\",None )\n if issubclass(typ,dict)and r is dict.__repr__:\n if not object:\n return\"{}\",True ,False\n objid=_id(object)\n if maxlevels and level >=maxlevels:\n return\"{...}\",False ,objid in context\n if objid in context:\n return _recursion(object),False ,True\n context[objid]=1\n readable=True\n recursive=False\n components=[]\n append=components.append\n level +=1\n saferepr=_safe_repr\n items=sorted(object.items(),key=_safe_tuple)\n for k,v in items:\n krepr,kreadable,krecur=saferepr(k,context,maxlevels,level)\n vrepr,vreadable,vrecur=saferepr(v,context,maxlevels,level)\n append(\"%s: %s\"%(krepr,vrepr))\n readable=readable and kreadable and vreadable\n if krecur or vrecur:\n recursive=True\n del context[objid]\n return\"{%s}\"%_commajoin(components),readable,recursive\n \n if (issubclass(typ,list)and r is list.__repr__)or (issubclass(typ,tuple)and r is tuple.__repr__):\n if issubclass(typ,list):\n if not object:\n return\"[]\",True ,False\n format=\"[%s]\"\n elif _len(object)==1:\n format=\"(%s,)\"\n else :\n if not object:\n return\"()\",True ,False\n format=\"(%s)\"\n objid=_id(object)\n if maxlevels and level >=maxlevels:\n return format %\"...\",False ,objid in context\n if objid in context:\n return _recursion(object),False ,True\n context[objid]=1\n readable=True\n recursive=False\n components=[]\n append=components.append\n level +=1\n for o in object:\n orepr,oreadable,orecur=_safe_repr(o,context,maxlevels,level)\n append(orepr)\n if not oreadable:\n readable=False\n if orecur:\n recursive=True\n del context[objid]\n return format 
%_commajoin(components),readable,recursive\n \n rep=repr(object)\n return rep,(rep and not rep.startswith('<')),False\n \n \ndef _recursion(object):\n return (\"\"\n %(_type(object).__name__,_id(object)))\n \n \ndef _perfcheck(object=None ):\n import time\n if object is None :\n object=[(\"string\",(1,2),[3,4],{5:6,7:8})]*100000\n p=PrettyPrinter()\n t1=time.time()\n _safe_repr(object,{},None ,0)\n t2=time.time()\n p.pformat(object)\n t3=time.time()\n print(\"_safe_repr:\",t2 -t1)\n print(\"pformat:\",t3 -t2)\n \nif __name__ ==\"__main__\":\n _perfcheck()\n"], "_sysconfigdata": [".py", "build_time_vars={'HAVE_SYS_WAIT_H':1,'HAVE_UTIL_H':0,'HAVE_SYMLINKAT':1,'HAVE_LIBSENDFILE':0,'SRCDIRS':'Parser Grammar Objects Python Modules Mac','SIZEOF_OFF_T':8,'BASECFLAGS':'-Wno-unused-result','HAVE_UTIME_H':1,'EXTRAMACHDEPPATH':'','HAVE_SYS_TIME_H':1,'CFLAGSFORSHARED':'-fPIC','HAVE_HYPOT':1,'PGSRCS':'\\\\','HAVE_LIBUTIL_H':0,'HAVE_COMPUTED_GOTOS':1,'HAVE_LUTIMES':1,'HAVE_MAKEDEV':1,'HAVE_REALPATH':1,'HAVE_LINUX_TIPC_H':1,'MULTIARCH':'i386-linux-gnu','HAVE_GETWD':1,'HAVE_GCC_ASM_FOR_X64':0,'HAVE_INET_PTON':1,'HAVE_GETHOSTBYNAME_R_6_ARG':1,'SIZEOF__BOOL':1,'HAVE_ZLIB_COPY':1,'ASDLGEN':'python3.3 ../Parser/asdl_c.py','GRAMMAR_INPUT':'../Grammar/Grammar','HOST_GNU_TYPE':'i686-pc-linux-gnu','HAVE_SCHED_RR_GET_INTERVAL':1,'HAVE_BLUETOOTH_H':0,'HAVE_MKFIFO':1,'TIMEMODULE_LIB':0,'LIBM':'-lm','PGENOBJS':'\\\\ \\\\','PYTHONFRAMEWORK':'','GETPGRP_HAVE_ARG':0,'HAVE_MMAP':1,'SHLIB_SUFFIX':'.so','SIZEOF_FLOAT':4,'HAVE_RENAMEAT':1,'HAVE_LANGINFO_H':1,'HAVE_STDLIB_H':1,'PY_CORE_CFLAGS':'-Wno-unused-result -DNDEBUG -g -fwrapv -O2 -Wall -Wstrict-prototypes -g -fstack-protector --param=ssp-buffer-size=4 -Wformat -Werror=format-security -I. -IInclude -I../Include -D_FORTIFY_SOURCE=2 -fPIC -DPy_BUILD_CORE','HAVE_BROKEN_PIPE_BUF':0,'HAVE_CONFSTR':1,'HAVE_SIGTIMEDWAIT':1,'HAVE_FTELLO':1,'READELF':'readelf','HAVE_SIGALTSTACK':1,'TESTTIMEOUT':3600,'PYTHONPATH':':plat-i386-linux-gnu','SIZEOF_WCHAR_T':4,'LIBOBJS':'','HAVE_SYSCONF':1,'MAKESETUP':'../Modules/makesetup','HAVE_UTIMENSAT':1,'HAVE_FCHOWNAT':1,'HAVE_WORKING_TZSET':1,'HAVE_FINITE':1,'HAVE_ASINH':1,'HAVE_SETEUID':1,'CONFIGFILES':'configure configure.ac acconfig.h pyconfig.h.in Makefile.pre.in','HAVE_SETGROUPS':1,'PARSER_OBJS':'\\\\ Parser/myreadline.o Parser/parsetok.o Parser/tokenizer.o','HAVE_MBRTOWC':1,'SIZEOF_INT':4,'HAVE_STDARG_PROTOTYPES':1,'TM_IN_SYS_TIME':0,'HAVE_SYS_TIMES_H':1,'HAVE_LCHOWN':1,'HAVE_SSIZE_T':1,'HAVE_PAUSE':1,'SYSLIBS':'-lm','POSIX_SEMAPHORES_NOT_ENABLED':0,'HAVE_DEVICE_MACROS':1,'BLDSHARED':'i686-linux-gnu-gcc -pthread -shared -Wl,-O1 -Wl,-Bsymbolic-functions -Wl,-Bsymbolic-functions -Wl,-z,relro -Wno-unused-result -DNDEBUG -g -fwrapv -O2 -Wall -Wstrict-prototypes -g -fstack-protector --param=ssp-buffer-size=4 -Wformat -Werror=format-security ','LIBSUBDIRS':'tkinter tkinter/test tkinter/test/test_tkinter \\\\','HAVE_SYS_UN_H':1,'HAVE_SYS_STAT_H':1,'VPATH':'..','INCLDIRSTOMAKE':'/usr/include /usr/include /usr/include/python3.3m /usr/include/python3.3m','HAVE_BROKEN_SEM_GETVALUE':0,'HAVE_TIMEGM':1,'PACKAGE_VERSION':0,'MAJOR_IN_SYSMACROS':0,'HAVE_ATANH':1,'HAVE_GAI_STRERROR':1,'HAVE_SYS_POLL_H':1,'SIZEOF_PTHREAD_T':4,'SIZEOF_FPOS_T':16,'HAVE_CTERMID':1,'HAVE_TMPFILE':1,'HAVE_SETUID':1,'CXX':'i686-linux-gnu-g++ 
-pthread','srcdir':'..','HAVE_UINT32_T':1,'HAVE_ADDRINFO':1,'HAVE_GETSPENT':1,'SIZEOF_DOUBLE':8,'HAVE_INT32_T':1,'LIBRARY_OBJS_OMIT_FROZEN':'\\\\','HAVE_FUTIMES':1,'CONFINCLUDEPY':'/usr/include/python3.3m','HAVE_RL_COMPLETION_APPEND_CHARACTER':1,'LIBFFI_INCLUDEDIR':'','HAVE_SETGID':1,'HAVE_UINT64_T':1,'EXEMODE':755,'UNIVERSALSDK':'','HAVE_LIBDL':1,'HAVE_GETNAMEINFO':1,'HAVE_STDINT_H':1,'COREPYTHONPATH':':plat-i386-linux-gnu','HAVE_SOCKADDR_STORAGE':1,'HAVE_WAITID':1,'EXTRAPLATDIR':'@EXTRAPLATDIR@','HAVE_ACCEPT4':1,'RUNSHARED':'LD_LIBRARY_PATH=/build/buildd/python3.3-3.3.1/build-shared:','EXE':'','HAVE_SIGACTION':1,'HAVE_CHOWN':1,'HAVE_GETLOGIN':1,'HAVE_TZNAME':0,'PACKAGE_NAME':0,'HAVE_GETPGID':1,'HAVE_GLIBC_MEMMOVE_BUG':0,'BUILD_GNU_TYPE':'i686-pc-linux-gnu','HAVE_LINUX_CAN_H':1,'DYNLOADFILE':'dynload_shlib.o','HAVE_PWRITE':1,'BUILDEXE':'','HAVE_OPENPTY':1,'HAVE_LOCKF':1,'HAVE_COPYSIGN':1,'HAVE_PREAD':1,'HAVE_DLOPEN':1,'HAVE_SYS_KERN_CONTROL_H':0,'PY_FORMAT_LONG_LONG':'\"ll\"','HAVE_TCSETPGRP':1,'HAVE_SETSID':1,'HAVE_STRUCT_STAT_ST_BIRTHTIME':0,'HAVE_STRING_H':1,'LDLIBRARY':'libpython3.3m.so','INSTALL_SCRIPT':'/usr/bin/install -c','HAVE_SYS_XATTR_H':1,'HAVE_CURSES_IS_TERM_RESIZED':1,'HAVE_TMPNAM_R':1,'STRICT_SYSV_CURSES':\"/* Don't use ncurses extensions */\",'WANT_SIGFPE_HANDLER':1,'HAVE_INT64_T':1,'HAVE_STAT_TV_NSEC':1,'HAVE_SYS_MKDEV_H':0,'HAVE_BROKEN_POLL':0,'HAVE_IF_NAMEINDEX':1,'HAVE_GETPWENT':1,'PSRCS':'\\\\','RANLIB':'ranlib','HAVE_WCSCOLL':1,'WITH_NEXT_FRAMEWORK':0,'ASDLGEN_FILES':'../Parser/asdl.py ../Parser/asdl_c.py','HAVE_RL_PRE_INPUT_HOOK':1,'PACKAGE_URL':0,'SHLIB_EXT':0,'HAVE_SYS_LOADAVG_H':0,'HAVE_LIBIEEE':0,'HAVE_SEM_OPEN':1,'HAVE_TERM_H':1,'IO_OBJS':'\\\\','IO_H':'Modules/_io/_iomodule.h','HAVE_STATVFS':1,'VERSION':'3.3','HAVE_GETC_UNLOCKED':1,'MACHDEPS':'plat-i386-linux-gnu @EXTRAPLATDIR@','SUBDIRSTOO':'Include Lib Misc','HAVE_SETREUID':1,'HAVE_ERFC':1,'HAVE_SETRESUID':1,'LINKFORSHARED':'-Xlinker -export-dynamic -Wl,-O1 -Wl,-Bsymbolic-functions','HAVE_SYS_TYPES_H':1,'HAVE_GETPAGESIZE':1,'HAVE_SETEGID':1,'HAVE_PTY_H':1,'HAVE_STRUCT_STAT_ST_FLAGS':0,'HAVE_WCHAR_H':1,'HAVE_FSEEKO':1,'Py_ENABLE_SHARED':1,'HAVE_SIGRELSE':1,'HAVE_PTHREAD_INIT':0,'FILEMODE':644,'HAVE_SYS_RESOURCE_H':1,'HAVE_READLINKAT':1,'PYLONG_BITS_IN_DIGIT':0,'LINKCC':'i686-linux-gnu-gcc -pthread','HAVE_SETLOCALE':1,'HAVE_CHROOT':1,'HAVE_OPENAT':1,'HAVE_FEXECVE':1,'LDCXXSHARED':'i686-linux-gnu-g++ -pthread -shared -Wl,-O1 -Wl,-Bsymbolic-functions','DIST':'README ChangeLog configure configure.ac acconfig.h pyconfig.h.in Makefile.pre.in Include Lib Misc Ext-dummy','HAVE_MKNOD':1,'PY_LDFLAGS':'-Wl,-Bsymbolic-functions -Wl,-z,relro','HAVE_BROKEN_MBSTOWCS':0,'LIBRARY_OBJS':'\\\\','HAVE_LOG1P':1,'SIZEOF_VOID_P':4,'HAVE_FCHOWN':1,'PYTHONFRAMEWORKPREFIX':'','HAVE_LIBDLD':0,'HAVE_TGAMMA':1,'HAVE_ERRNO_H':1,'HAVE_IO_H':0,'OTHER_LIBTOOL_OPT':'','HAVE_POLL_H':1,'PY_CPPFLAGS':'-I. 
-IInclude -I../Include -D_FORTIFY_SOURCE=2','XMLLIBSUBDIRS':'xml xml/dom xml/etree xml/parsers xml/sax','GRAMMAR_H':'Include/graminit.h','TANH_PRESERVES_ZERO_SIGN':1,'HAVE_GETLOADAVG':1,'UNICODE_DEPS':'\\\\ \\\\','HAVE_GETCWD':1,'MANDIR':'/usr/share/man','MACHDESTLIB':'/usr/lib/python3.3','GRAMMAR_C':'Python/graminit.c','PGOBJS':'\\\\','HAVE_DEV_PTMX':1,'HAVE_UINTPTR_T':1,'HAVE_SCHED_SETAFFINITY':1,'PURIFY':'','HAVE_DECL_ISINF':1,'HAVE_RL_CALLBACK':1,'HAVE_WRITEV':1,'HAVE_GETHOSTBYNAME_R_5_ARG':0,'HAVE_SYS_AUDIOIO_H':0,'EXT_SUFFIX':'.cpython-33m.so','SIZEOF_LONG_LONG':8,'DLINCLDIR':'.','HAVE_PATHCONF':1,'HAVE_UNLINKAT':1,'MKDIR_P':'/bin/mkdir -p','HAVE_ALTZONE':0,'SCRIPTDIR':'/usr/lib','OPCODETARGETGEN_FILES':'\\\\','HAVE_GETSPNAM':1,'HAVE_SYS_TERMIO_H':0,'HAVE_ATTRIBUTE_FORMAT_PARSETUPLE':0,'HAVE_PTHREAD_H':1,'Py_DEBUG':0,'HAVE_STRUCT_STAT_ST_BLOCKS':1,'X87_DOUBLE_ROUNDING':1,'SIZEOF_TIME_T':4,'HAVE_DYNAMIC_LOADING':1,'HAVE_DIRECT_H':0,'SRC_GDB_HOOKS':'../Tools/gdb/libpython.py','HAVE_GETADDRINFO':1,'HAVE_BROKEN_NICE':0,'HAVE_DIRENT_H':1,'HAVE_WCSXFRM':1,'HAVE_RL_COMPLETION_DISPLAY_MATCHES_HOOK':1,'HAVE_FSTATVFS':1,'PYTHON':'python','HAVE_OSX105_SDK':0,'BINDIR':'/usr/bin','TESTPYTHON':'LD_LIBRARY_PATH=/build/buildd/python3.3-3.3.1/build-shared: ./python','ARFLAGS':'rc','PLATDIR':'plat-i386-linux-gnu','HAVE_ASM_TYPES_H':1,'PY3LIBRARY':'libpython3.so','HAVE_PLOCK':0,'FLOCK_NEEDS_LIBBSD':0,'WITH_TSC':0,'HAVE_LIBREADLINE':1,'MACHDEP':'linux','HAVE_SELECT':1,'LDFLAGS':'-Wl,-Bsymbolic-functions -Wl,-z,relro','HAVE_HSTRERROR':1,'SOABI':'cpython-33m','HAVE_GETTIMEOFDAY':1,'HAVE_LIBRESOLV':0,'HAVE_UNSETENV':1,'HAVE_TM_ZONE':1,'HAVE_GETPGRP':1,'HAVE_FLOCK':1,'HAVE_SYS_BSDTTY_H':0,'SUBDIRS':'','PYTHONFRAMEWORKINSTALLDIR':'','PACKAGE_BUGREPORT':0,'HAVE_CLOCK':1,'HAVE_GETPEERNAME':1,'SIZEOF_PID_T':4,'HAVE_CONIO_H':0,'HAVE_FSTATAT':1,'HAVE_NETPACKET_PACKET_H':1,'HAVE_WAIT3':1,'DESTPATH':'','HAVE_STAT_TV_NSEC2':0,'HAVE_GETRESGID':1,'HAVE_UCS4_TCL':0,'SIGNED_RIGHT_SHIFT_ZERO_FILLS':0,'HAVE_TIMES':1,'HAVE_UNAME':1,'HAVE_ERF':1,'SIZEOF_SHORT':2,'HAVE_NCURSES_H':1,'HAVE_SYS_SENDFILE_H':1,'HAVE_CTERMID_R':0,'HAVE_TMPNAM':1,'prefix':'/usr','HAVE_NICE':1,'WITH_THREAD':1,'LN':'ln','TESTRUNNER':'LD_LIBRARY_PATH=/build/buildd/python3.3-3.3.1/build-shared: ./python ../Tools/scripts/run_tests.py','HAVE_SIGINTERRUPT':1,'HAVE_SETPGID':1,'RETSIGTYPE':'void','HAVE_SCHED_GET_PRIORITY_MAX':1,'HAVE_SYS_SYS_DOMAIN_H':0,'HAVE_SYS_DIR_H':0,'HAVE__GETPTY':0,'HAVE_BLUETOOTH_BLUETOOTH_H':1,'HAVE_BIND_TEXTDOMAIN_CODESET':1,'HAVE_POLL':1,'PYTHON_OBJS':'\\\\','HAVE_WAITPID':1,'USE_INLINE':1,'HAVE_FUTIMENS':1,'USE_COMPUTED_GOTOS':1,'MAINCC':'i686-linux-gnu-gcc -pthread','HAVE_SOCKETPAIR':1,'HAVE_PROCESS_H':0,'HAVE_SETVBUF':1,'HAVE_FDOPENDIR':1,'CONFINCLUDEDIR':'/usr/include','BINLIBDEST':'/usr/lib/python3.3','HAVE_SYS_IOCTL_H':1,'HAVE_SYSEXITS_H':1,'LDLAST':'','HAVE_SYS_FILE_H':1,'HAVE_RL_COMPLETION_SUPPRESS_APPEND':1,'HAVE_RL_COMPLETION_MATCHES':1,'HAVE_TCGETPGRP':1,'SIZEOF_SIZE_T':4,'HAVE_EPOLL_CREATE1':1,'HAVE_SYS_SELECT_H':1,'HAVE_CLOCK_GETTIME':1,'CFLAGS':'-Wno-unused-result -DNDEBUG -g -fwrapv -O2 -Wall -Wstrict-prototypes -g -fstack-protector --param=ssp-buffer-size=4 -Wformat -Werror=format-security ','HAVE_SNPRINTF':1,'BLDLIBRARY':'-lpython3.3m','PARSER_HEADERS':'\\\\','SO':'.so','LIBRARY':'libpython3.3m.a','HAVE_FPATHCONF':1,'HAVE_TERMIOS_H':1,'HAVE_BROKEN_PTHREAD_SIGMASK':0,'AST_H':'Include/Python-ast.h','HAVE_GCC_UINT128_T':0,'HAVE_ACOSH':1,'MODOBJS':'Modules/_threadmodule.o Modules/signalmodule.o Modules/arraymodule.o 
Modules/mathmodule.o Modules/_math.o Modules/_struct.o Modules/timemodule.o Modules/_randommodule.o Modules/atexitmodule.o Modules/_elementtree.o Modules/_pickle.o Modules/_datetimemodule.o Modules/_bisectmodule.o Modules/_heapqmodule.o Modules/unicodedata.o Modules/fcntlmodule.o Modules/spwdmodule.o Modules/grpmodule.o Modules/selectmodule.o Modules/socketmodule.o Modules/_posixsubprocess.o Modules/md5module.o Modules/sha1module.o Modules/sha256module.o Modules/sha512module.o Modules/syslogmodule.o Modules/binascii.o Modules/zlibmodule.o Modules/pyexpat.o Modules/posixmodule.o Modules/errnomodule.o Modules/pwdmodule.o Modules/_sre.o Modules/_codecsmodule.o Modules/_weakref.o Modules/_functoolsmodule.o Modules/operator.o Modules/_collectionsmodule.o Modules/itertoolsmodule.o Modules/_localemodule.o Modules/_iomodule.o Modules/iobase.o Modules/fileio.o Modules/bytesio.o Modules/bufferedio.o Modules/textio.o Modules/stringio.o Modules/zipimport.o Modules/faulthandler.o Modules/symtablemodule.o Modules/xxsubtype.o','AST_C':'Python/Python-ast.c','HAVE_SYS_NDIR_H':0,'DESTDIRS':'/usr /usr/lib /usr/lib/python3.3 /usr/lib/python3.3/lib-dynload','HAVE_SIGNAL_H':1,'PACKAGE_TARNAME':0,'HAVE_GETPRIORITY':1,'INCLUDEDIR':'/usr/include','HAVE_INTTYPES_H':1,'SIGNAL_OBJS':'','HAVE_READV':1,'HAVE_SETHOSTNAME':1,'MODLIBS':'-lrt -lexpat -L/usr/lib -lz -lexpat','CC':'i686-linux-gnu-gcc -pthread','HAVE_LCHMOD':0,'SIZEOF_UINTPTR_T':4,'LIBPC':'/usr/lib/i386-linux-gnu/pkgconfig','BYTESTR_DEPS':'\\\\','HAVE_MKDIRAT':1,'LIBPL':'/usr/lib/python3.3/config-3.3m-i386-linux-gnu','HAVE_SHADOW_H':1,'HAVE_SYS_EVENT_H':0,'INSTALL':'/usr/bin/install -c','HAVE_GCC_ASM_FOR_X87':1,'HAVE_BROKEN_UNSETENV':0,'BASECPPFLAGS':'','DOUBLE_IS_BIG_ENDIAN_IEEE754':0,'HAVE_STRUCT_STAT_ST_RDEV':1,'HAVE_SEM_UNLINK':1,'BUILDPYTHON':'python','HAVE_RL_CATCH_SIGNAL':1,'HAVE_DECL_TZNAME':0,'RESSRCDIR':'Mac/Resources/framework','HAVE_PTHREAD_SIGMASK':1,'HAVE_UTIMES':1,'DISTDIRS':'Include Lib Misc Ext-dummy','HAVE_FDATASYNC':1,'HAVE_USABLE_WCHAR_T':0,'PY_FORMAT_SIZE_T':'\"z\"','HAVE_SCHED_SETSCHEDULER':1,'VA_LIST_IS_ARRAY':0,'HAVE_LINUX_NETLINK_H':1,'HAVE_SETREGID':1,'HAVE_STROPTS_H':1,'LDVERSION':'3.3m','abs_builddir':'/build/buildd/python3.3-3.3.1/build-shared','SITEPATH':'','HAVE_GETHOSTBYNAME':0,'HAVE_SIGPENDING':1,'HAVE_KQUEUE':0,'HAVE_SYNC':1,'HAVE_GETSID':1,'HAVE_ROUND':1,'HAVE_STRFTIME':1,'AST_H_DIR':'Include','HAVE_PIPE2':1,'AST_C_DIR':'Python','TESTPYTHONOPTS':'','HAVE_DEV_PTC':0,'GETTIMEOFDAY_NO_TZ':0,'HAVE_NET_IF_H':1,'HAVE_SENDFILE':1,'HAVE_SETPGRP':1,'HAVE_SEM_GETVALUE':1,'CONFIGURE_LDFLAGS':'-Wl,-Bsymbolic-functions -Wl,-z,relro','DLLLIBRARY':'','PYTHON_FOR_BUILD':'./python -E','SETPGRP_HAVE_ARG':0,'HAVE_INET_ATON':1,'INSTALL_SHARED':'/usr/bin/install -c -m 555','WITH_DOC_STRINGS':1,'OPCODETARGETS_H':'\\\\','HAVE_INITGROUPS':1,'HAVE_LINKAT':1,'BASEMODLIBS':'','SGI_ABI':'','HAVE_SCHED_SETPARAM':1,'OPT':'-DNDEBUG -g -fwrapv -O2 -Wall -Wstrict-prototypes','HAVE_POSIX_FADVISE':1,'datarootdir':'/usr/share','HAVE_MEMRCHR':1,'HGTAG':'','HAVE_MEMMOVE':1,'HAVE_GETRESUID':1,'DOUBLE_IS_ARM_MIXED_ENDIAN_IEEE754':0,'HAVE_LSTAT':1,'AR':'ar','HAVE_WAIT4':1,'HAVE_SYS_MODEM_H':0,'INSTSONAME':'libpython3.3m.so.1.0','HAVE_SYS_STATVFS_H':1,'HAVE_LGAMMA':1,'HAVE_PROTOTYPES':1,'HAVE_SYS_UIO_H':1,'MAJOR_IN_MKDEV':0,'QUICKTESTOPTS':'-x test_subprocess test_io test_lib2to3 
\\\\','HAVE_SYS_DEVPOLL_H':0,'HAVE_CHFLAGS':0,'HAVE_FSYNC':1,'HAVE_FCHMOD':1,'INCLUDEPY':'/usr/include/python3.3m','HAVE_SEM_TIMEDWAIT':1,'LDLIBRARYDIR':'','HAVE_STRUCT_TM_TM_ZONE':1,'HAVE_CURSES_H':1,'TIME_WITH_SYS_TIME':1,'HAVE_DUP2':1,'ENABLE_IPV6':1,'WITH_VALGRIND':0,'HAVE_SETITIMER':1,'THREADOBJ':'Python/thread.o','LOCALMODLIBS':'-lrt -lexpat -L/usr/lib -lz -lexpat','HAVE_MEMORY_H':1,'HAVE_GETITIMER':1,'HAVE_C99_BOOL':1,'INSTALL_DATA':'/usr/bin/install -c -m 644','PGEN':'Parser/pgen','HAVE_GRP_H':1,'HAVE_WCSFTIME':1,'AIX_GENUINE_CPLUSPLUS':0,'HAVE_LIBINTL_H':1,'SHELL':'/bin/sh','HAVE_UNISTD_H':1,'EXTRATESTOPTS':'','HAVE_EXECV':1,'HAVE_FSEEK64':0,'MVWDELCH_IS_EXPRESSION':1,'DESTSHARED':'/usr/lib/python3.3/lib-dynload','OPCODETARGETGEN':'\\\\','LIBDEST':'/usr/lib/python3.3','CCSHARED':'-fPIC','HAVE_EXPM1':1,'HAVE_DLFCN_H':1,'exec_prefix':'/usr','HAVE_READLINK':1,'WINDOW_HAS_FLAGS':1,'HAVE_FTELL64':0,'HAVE_STRLCPY':0,'MACOSX_DEPLOYMENT_TARGET':'','HAVE_SYS_SYSCALL_H':1,'DESTLIB':'/usr/lib/python3.3','LDSHARED':'i686-linux-gnu-gcc -pthread -shared -Wl,-O1 -Wl,-Bsymbolic-functions -Wl,-Bsymbolic-functions -Wl,-z,relro -Wno-unused-result -DNDEBUG -g -fwrapv -O2 -Wall -Wstrict-prototypes -g -fstack-protector --param=ssp-buffer-size=4 -Wformat -Werror=format-security ','HGVERSION':'','PYTHON_HEADERS':'\\\\','HAVE_STRINGS_H':1,'DOUBLE_IS_LITTLE_ENDIAN_IEEE754':1,'HAVE_POSIX_FALLOCATE':1,'HAVE_DIRFD':1,'HAVE_LOG2':1,'HAVE_GETPID':1,'HAVE_ALARM':1,'MACHDEP_OBJS':'','HAVE_SPAWN_H':1,'HAVE_FORK':1,'HAVE_SETRESGID':1,'HAVE_FCHMODAT':1,'HAVE_CLOCK_GETRES':1,'MACHDEPPATH':':plat-i386-linux-gnu','STDC_HEADERS':1,'HAVE_SETPRIORITY':1,'LIBC':'','HAVE_SYS_EPOLL_H':1,'HAVE_SYS_UTSNAME_H':1,'HAVE_PUTENV':1,'HAVE_CURSES_RESIZE_TERM':1,'HAVE_FUTIMESAT':1,'WITH_DYLD':0,'INSTALL_PROGRAM':'/usr/bin/install -c','LIBS':'-lpthread -ldl -lutil','HAVE_TRUNCATE':1,'TESTOPTS':'','PROFILE_TASK':'../Tools/pybench/pybench.py -n 2 --with-gc --with-syscheck','HAVE_CURSES_RESIZETERM':1,'ABIFLAGS':'m','HAVE_GETGROUPLIST':1,'OBJECT_OBJS':'\\\\','HAVE_MKNODAT':1,'HAVE_ST_BLOCKS':1,'HAVE_STRUCT_STAT_ST_GEN':0,'SYS_SELECT_WITH_SYS_TIME':1,'SHLIBS':'-lpthread -ldl -lutil','HAVE_GETGROUPS':1,'MODULE_OBJS':'\\\\','PYTHONFRAMEWORKDIR':'no-framework','HAVE_FCNTL_H':1,'HAVE_LINK':1,'HAVE_SIGWAIT':1,'HAVE_GAMMA':1,'HAVE_SYS_LOCK_H':0,'HAVE_FORKPTY':1,'HAVE_SOCKADDR_SA_LEN':0,'HAVE_TEMPNAM':1,'HAVE_STRUCT_STAT_ST_BLKSIZE':1,'HAVE_MKFIFOAT':1,'HAVE_SIGWAITINFO':1,'HAVE_FTIME':1,'HAVE_EPOLL':1,'HAVE_SYS_SOCKET_H':1,'HAVE_LARGEFILE_SUPPORT':1,'CONFIGURE_CFLAGS':'-g -fstack-protector --param=ssp-buffer-size=4 -Wformat -Werror=format-security','HAVE_PTHREAD_DESTRUCTOR':0,'CONFIGURE_CPPFLAGS':'-D_FORTIFY_SOURCE=2','HAVE_SYMLINK':1,'HAVE_LONG_LONG':1,'HAVE_IEEEFP_H':0,'LIBDIR':'/usr/lib','HAVE_PTHREAD_KILL':1,'TESTPATH':'','HAVE_STRDUP':1,'POBJS':'\\\\','NO_AS_NEEDED':'-Wl,--no-as-needed','HAVE_LONG_DOUBLE':1,'HGBRANCH':'','DISTFILES':'README ChangeLog configure configure.ac acconfig.h pyconfig.h.in Makefile.pre.in','PTHREAD_SYSTEM_SCHED_SUPPORTED':1,'HAVE_FACCESSAT':1,'AST_ASDL':'../Parser/Python.asdl','CPPFLAGS':'-I. 
-IInclude -I../Include -D_FORTIFY_SOURCE=2','HAVE_MKTIME':1,'HAVE_NDIR_H':0,'PY_CFLAGS':'-Wno-unused-result -DNDEBUG -g -fwrapv -O2 -Wall -Wstrict-prototypes -g -fstack-protector --param=ssp-buffer-size=4 -Wformat -Werror=format-security ','LIBOBJDIR':'Python/','HAVE_LINUX_CAN_RAW_H':1,'HAVE_GETHOSTBYNAME_R_3_ARG':0,'PACKAGE_STRING':0,'GNULD':'yes','LOG1P_DROPS_ZERO_SIGN':0,'HAVE_FTRUNCATE':1,'WITH_LIBINTL':0,'HAVE_MREMAP':1,'HAVE_DECL_ISNAN':1,'HAVE_KILLPG':1,'SIZEOF_LONG':4,'HAVE_DECL_ISFINITE':1,'HAVE_IPA_PURE_CONST_BUG':0,'WITH_PYMALLOC':1,'abs_srcdir':'/build/buildd/python3.3-3.3.1/build-shared/..','HAVE_FCHDIR':1,'HAVE_BROKEN_POSIX_SEMAPHORES':0,'AC_APPLE_UNIVERSAL_BUILD':0,'PGENSRCS':'\\\\ \\\\','DIRMODE':755,'HAVE_GETHOSTBYNAME_R':1,'HAVE_LCHFLAGS':0,'HAVE_SYS_PARAM_H':1,'SIZEOF_LONG_DOUBLE':12,'CONFIG_ARGS':\"'--enable-shared' '--prefix=/usr' '--enable-ipv6' '--enable-loadable-sqlite-extensions' '--with-dbmliborder=bdb:gdbm' '--with-computed-gotos' '--with-system-expat' '--with-system-ffi' '--with-fpectl' 'CC=i686-linux-gnu-gcc' 'CFLAGS=-g -fstack-protector --param=ssp-buffer-size=4 -Wformat -Werror=format-security ' 'LDFLAGS=-Wl,-Bsymbolic-functions -Wl,-z,relro' 'CPPFLAGS=-D_FORTIFY_SOURCE=2'\",'HAVE_SCHED_H':1,'HAVE_KILL':1}\n\n"], "encodings.cp1125": [".py", "''\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_map)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False ):\n return codecs.charmap_encode(input,self.errors,encoding_map)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False ):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='cp1125',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n 
\ndecoding_map=codecs.make_identity_dict(range(256))\ndecoding_map.update({\n0x0080:0x0410,\n0x0081:0x0411,\n0x0082:0x0412,\n0x0083:0x0413,\n0x0084:0x0414,\n0x0085:0x0415,\n0x0086:0x0416,\n0x0087:0x0417,\n0x0088:0x0418,\n0x0089:0x0419,\n0x008a:0x041a,\n0x008b:0x041b,\n0x008c:0x041c,\n0x008d:0x041d,\n0x008e:0x041e,\n0x008f:0x041f,\n0x0090:0x0420,\n0x0091:0x0421,\n0x0092:0x0422,\n0x0093:0x0423,\n0x0094:0x0424,\n0x0095:0x0425,\n0x0096:0x0426,\n0x0097:0x0427,\n0x0098:0x0428,\n0x0099:0x0429,\n0x009a:0x042a,\n0x009b:0x042b,\n0x009c:0x042c,\n0x009d:0x042d,\n0x009e:0x042e,\n0x009f:0x042f,\n0x00a0:0x0430,\n0x00a1:0x0431,\n0x00a2:0x0432,\n0x00a3:0x0433,\n0x00a4:0x0434,\n0x00a5:0x0435,\n0x00a6:0x0436,\n0x00a7:0x0437,\n0x00a8:0x0438,\n0x00a9:0x0439,\n0x00aa:0x043a,\n0x00ab:0x043b,\n0x00ac:0x043c,\n0x00ad:0x043d,\n0x00ae:0x043e,\n0x00af:0x043f,\n0x00b0:0x2591,\n0x00b1:0x2592,\n0x00b2:0x2593,\n0x00b3:0x2502,\n0x00b4:0x2524,\n0x00b5:0x2561,\n0x00b6:0x2562,\n0x00b7:0x2556,\n0x00b8:0x2555,\n0x00b9:0x2563,\n0x00ba:0x2551,\n0x00bb:0x2557,\n0x00bc:0x255d,\n0x00bd:0x255c,\n0x00be:0x255b,\n0x00bf:0x2510,\n0x00c0:0x2514,\n0x00c1:0x2534,\n0x00c2:0x252c,\n0x00c3:0x251c,\n0x00c4:0x2500,\n0x00c5:0x253c,\n0x00c6:0x255e,\n0x00c7:0x255f,\n0x00c8:0x255a,\n0x00c9:0x2554,\n0x00ca:0x2569,\n0x00cb:0x2566,\n0x00cc:0x2560,\n0x00cd:0x2550,\n0x00ce:0x256c,\n0x00cf:0x2567,\n0x00d0:0x2568,\n0x00d1:0x2564,\n0x00d2:0x2565,\n0x00d3:0x2559,\n0x00d4:0x2558,\n0x00d5:0x2552,\n0x00d6:0x2553,\n0x00d7:0x256b,\n0x00d8:0x256a,\n0x00d9:0x2518,\n0x00da:0x250c,\n0x00db:0x2588,\n0x00dc:0x2584,\n0x00dd:0x258c,\n0x00de:0x2590,\n0x00df:0x2580,\n0x00e0:0x0440,\n0x00e1:0x0441,\n0x00e2:0x0442,\n0x00e3:0x0443,\n0x00e4:0x0444,\n0x00e5:0x0445,\n0x00e6:0x0446,\n0x00e7:0x0447,\n0x00e8:0x0448,\n0x00e9:0x0449,\n0x00ea:0x044a,\n0x00eb:0x044b,\n0x00ec:0x044c,\n0x00ed:0x044d,\n0x00ee:0x044e,\n0x00ef:0x044f,\n0x00f0:0x0401,\n0x00f1:0x0451,\n0x00f2:0x0490,\n0x00f3:0x0491,\n0x00f4:0x0404,\n0x00f5:0x0454,\n0x00f6:0x0406,\n0x00f7:0x0456,\n0x00f8:0x0407,\n0x00f9:0x0457,\n0x00fa:0x00b7,\n0x00fb:0x221a,\n0x00fc:0x2116,\n0x00fd:0x00a4,\n0x00fe:0x25a0,\n0x00ff:0x00a0,\n})\n\n\n\ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' 
'\n'!'\n'\"'\n'#'\n'$'\n'%'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\u0410'\n'\\u0411'\n'\\u0412'\n'\\u0413'\n'\\u0414'\n'\\u0415'\n'\\u0416'\n'\\u0417'\n'\\u0418'\n'\\u0419'\n'\\u041a'\n'\\u041b'\n'\\u041c'\n'\\u041d'\n'\\u041e'\n'\\u041f'\n'\\u0420'\n'\\u0421'\n'\\u0422'\n'\\u0423'\n'\\u0424'\n'\\u0425'\n'\\u0426'\n'\\u0427'\n'\\u0428'\n'\\u0429'\n'\\u042a'\n'\\u042b'\n'\\u042c'\n'\\u042d'\n'\\u042e'\n'\\u042f'\n'\\u0430'\n'\\u0431'\n'\\u0432'\n'\\u0433'\n'\\u0434'\n'\\u0435'\n'\\u0436'\n'\\u0437'\n'\\u0438'\n'\\u0439'\n'\\u043a'\n'\\u043b'\n'\\u043c'\n'\\u043d'\n'\\u043e'\n'\\u043f'\n'\\u2591'\n'\\u2592'\n'\\u2593'\n'\\u2502'\n'\\u2524'\n'\\u2561'\n'\\u2562'\n'\\u2556'\n'\\u2555'\n'\\u2563'\n'\\u2551'\n'\\u2557'\n'\\u255d'\n'\\u255c'\n'\\u255b'\n'\\u2510'\n'\\u2514'\n'\\u2534'\n'\\u252c'\n'\\u251c'\n'\\u2500'\n'\\u253c'\n'\\u255e'\n'\\u255f'\n'\\u255a'\n'\\u2554'\n'\\u2569'\n'\\u2566'\n'\\u2560'\n'\\u2550'\n'\\u256c'\n'\\u2567'\n'\\u2568'\n'\\u2564'\n'\\u2565'\n'\\u2559'\n'\\u2558'\n'\\u2552'\n'\\u2553'\n'\\u256b'\n'\\u256a'\n'\\u2518'\n'\\u250c'\n'\\u2588'\n'\\u2584'\n'\\u258c'\n'\\u2590'\n'\\u2580'\n'\\u0440'\n'\\u0441'\n'\\u0442'\n'\\u0443'\n'\\u0444'\n'\\u0445'\n'\\u0446'\n'\\u0447'\n'\\u0448'\n'\\u0449'\n'\\u044a'\n'\\u044b'\n'\\u044c'\n'\\u044d'\n'\\u044e'\n'\\u044f'\n'\\u0401'\n'\\u0451'\n'\\u0490'\n'\\u0491'\n'\\u0404'\n'\\u0454'\n'\\u0406'\n'\\u0456'\n'\\u0407'\n'\\u0457'\n'\\xb7'\n'\\u221a'\n'\\u2116'\n'\\xa4'\n'\\u25a0'\n'\\xa0'\n)\n\n\n\nencoding_map={\n0x0000:0x0000,\n0x0001:0x0001,\n0x0002:0x0002,\n0x0003:0x0003,\n0x0004:0x0004,\n0x0005:0x0005,\n0x0006:0x0006,\n0x0007:0x0007,\n0x0008:0x0008,\n0x0009:0x0009,\n0x000a:0x000a,\n0x000b:0x000b,\n0x000c:0x000c,\n0x000d:0x000d,\n0x000e:0x000e,\n0x000f:0x000f,\n0x0010:0x0010,\n0x0011:0x0011,\n0x0012:0x0012,\n0x0013:0x0013,\n0x0014:0x0014,\n0x0015:0x0015,\n0x0016:0x0016,\n0x0017:0x0017,\n0x0018:0x0018,\n0x0019:0x0019,\n0x001a:0x001a,\n0x001b:0x001b,\n0x001c:0x001c,\n0x001d:0x001d,\n0x001e:0x001e,\n0x001f:0x001f,\n0x0020:0x0020,\n0x0021:0x0021,\n0x0022:0x0022,\n0x0023:0x0023,\n0x0024:0x0024,\n0x0025:0x0025,\n0x0026:0x0026,\n0x0027:0x0027,\n0x0028:0x0028,\n0x0029:0x0029,\n0x002a:0x002a,\n0x002b:0x002b,\n0x002c:0x002c,\n0x002d:0x002d,\n0x002e:0x002e,\n0x002f:0x002f,\n0x0030:0x0030,\n0x0031:0x0031,\n0x0032:0x0032,\n0x0033:0x0033,\n0x0034:0x0034,\n0x0035:0x0035,\n0x0036:0x0036,\n0x0037:0x0037,\n0x0038:0x0038,\n0x0039:0x0039,\n0x003a:0x003a,\n0x003b:0x003b,\n0x003c:0x003c,\n0x003d:0x003d,\n0x003e:0x003e,\n0x003f:0x003f,\n0x0040:0x0040,\n0x0041:0x0041,\n0x0042:0x0042,\n0x0043:0x0043,\n0x0044:0x0044,\n0x0045:0x0045,\n0x0046:0x0046,\n0x0047:0x0047,\n0x0048:0x0048,\n0x0049:0x0049,\n0x004a:0x004a,\n0x004b:0x004b,\n0x004c:0x004c,\n0x004d:0x004d,\n0x004e:0x004e,\n0x004f:0x004f,\n0x0050:0x0050,\n0x0051:0x0051,\n0x0052:0x0052,\n0x0053:0x0053,\n0x0054:0x0054,\n0x0055:0x0055,\n0x0056:0x0056,\n0x0057:0x0057,\n0x0058:0x0058,\n0x0059:0x0059,\n0x005a:0x005a,\n0x005b:0x005b,\n0x005c:0x005c,\n0x005d:0x005d,\n0x005e:0x005e,\n0x005f:0x005f,\n0x0060:0x0060,\n0x0061:0x0061,\n0x0062:0x0062,\n0x0063:0x0063,\n0x0064:0x0064,\n0x0065:0x0065,\n0x0066:0
x0066,\n0x0067:0x0067,\n0x0068:0x0068,\n0x0069:0x0069,\n0x006a:0x006a,\n0x006b:0x006b,\n0x006c:0x006c,\n0x006d:0x006d,\n0x006e:0x006e,\n0x006f:0x006f,\n0x0070:0x0070,\n0x0071:0x0071,\n0x0072:0x0072,\n0x0073:0x0073,\n0x0074:0x0074,\n0x0075:0x0075,\n0x0076:0x0076,\n0x0077:0x0077,\n0x0078:0x0078,\n0x0079:0x0079,\n0x007a:0x007a,\n0x007b:0x007b,\n0x007c:0x007c,\n0x007d:0x007d,\n0x007e:0x007e,\n0x007f:0x007f,\n0x00a0:0x00ff,\n0x00a4:0x00fd,\n0x00b7:0x00fa,\n0x0401:0x00f0,\n0x0404:0x00f4,\n0x0406:0x00f6,\n0x0407:0x00f8,\n0x0410:0x0080,\n0x0411:0x0081,\n0x0412:0x0082,\n0x0413:0x0083,\n0x0414:0x0084,\n0x0415:0x0085,\n0x0416:0x0086,\n0x0417:0x0087,\n0x0418:0x0088,\n0x0419:0x0089,\n0x041a:0x008a,\n0x041b:0x008b,\n0x041c:0x008c,\n0x041d:0x008d,\n0x041e:0x008e,\n0x041f:0x008f,\n0x0420:0x0090,\n0x0421:0x0091,\n0x0422:0x0092,\n0x0423:0x0093,\n0x0424:0x0094,\n0x0425:0x0095,\n0x0426:0x0096,\n0x0427:0x0097,\n0x0428:0x0098,\n0x0429:0x0099,\n0x042a:0x009a,\n0x042b:0x009b,\n0x042c:0x009c,\n0x042d:0x009d,\n0x042e:0x009e,\n0x042f:0x009f,\n0x0430:0x00a0,\n0x0431:0x00a1,\n0x0432:0x00a2,\n0x0433:0x00a3,\n0x0434:0x00a4,\n0x0435:0x00a5,\n0x0436:0x00a6,\n0x0437:0x00a7,\n0x0438:0x00a8,\n0x0439:0x00a9,\n0x043a:0x00aa,\n0x043b:0x00ab,\n0x043c:0x00ac,\n0x043d:0x00ad,\n0x043e:0x00ae,\n0x043f:0x00af,\n0x0440:0x00e0,\n0x0441:0x00e1,\n0x0442:0x00e2,\n0x0443:0x00e3,\n0x0444:0x00e4,\n0x0445:0x00e5,\n0x0446:0x00e6,\n0x0447:0x00e7,\n0x0448:0x00e8,\n0x0449:0x00e9,\n0x044a:0x00ea,\n0x044b:0x00eb,\n0x044c:0x00ec,\n0x044d:0x00ed,\n0x044e:0x00ee,\n0x044f:0x00ef,\n0x0451:0x00f1,\n0x0454:0x00f5,\n0x0456:0x00f7,\n0x0457:0x00f9,\n0x0490:0x00f2,\n0x0491:0x00f3,\n0x2116:0x00fc,\n0x221a:0x00fb,\n0x2500:0x00c4,\n0x2502:0x00b3,\n0x250c:0x00da,\n0x2510:0x00bf,\n0x2514:0x00c0,\n0x2518:0x00d9,\n0x251c:0x00c3,\n0x2524:0x00b4,\n0x252c:0x00c2,\n0x2534:0x00c1,\n0x253c:0x00c5,\n0x2550:0x00cd,\n0x2551:0x00ba,\n0x2552:0x00d5,\n0x2553:0x00d6,\n0x2554:0x00c9,\n0x2555:0x00b8,\n0x2556:0x00b7,\n0x2557:0x00bb,\n0x2558:0x00d4,\n0x2559:0x00d3,\n0x255a:0x00c8,\n0x255b:0x00be,\n0x255c:0x00bd,\n0x255d:0x00bc,\n0x255e:0x00c6,\n0x255f:0x00c7,\n0x2560:0x00cc,\n0x2561:0x00b5,\n0x2562:0x00b6,\n0x2563:0x00b9,\n0x2564:0x00d1,\n0x2565:0x00d2,\n0x2566:0x00cb,\n0x2567:0x00cf,\n0x2568:0x00d0,\n0x2569:0x00ca,\n0x256a:0x00d8,\n0x256b:0x00d7,\n0x256c:0x00ce,\n0x2580:0x00df,\n0x2584:0x00dc,\n0x2588:0x00db,\n0x258c:0x00dd,\n0x2590:0x00de,\n0x2591:0x00b0,\n0x2592:0x00b1,\n0x2593:0x00b2,\n0x25a0:0x00fe,\n}\n"], "crypto_js.rollups.sha224": [".js", "/*\nCryptoJS v3.1.2\ncode.google.com/p/crypto-js\n(c) 2009-2013 by Jeff Mott. 
All rights reserved.\ncode.google.com/p/crypto-js/wiki/License\n*/\nvar CryptoJS=CryptoJS||function(g,l){var f={},k=f.lib={},h=function(){},m=k.Base={extend:function(a){h.prototype=this;var c=new h;a&&c.mixIn(a);c.hasOwnProperty(\"init\")||(c.init=function(){c.$super.init.apply(this,arguments)});c.init.prototype=c;c.$super=this;return c},create:function(){var a=this.extend();a.init.apply(a,arguments);return a},init:function(){},mixIn:function(a){for(var c in a)a.hasOwnProperty(c)&&(this[c]=a[c]);a.hasOwnProperty(\"toString\")&&(this.toString=a.toString)},clone:function(){return this.init.prototype.extend(this)}},\nq=k.WordArray=m.extend({init:function(a,c){a=this.words=a||[];this.sigBytes=c!=l?c:4*a.length},toString:function(a){return(a||s).stringify(this)},concat:function(a){var c=this.words,d=a.words,b=this.sigBytes;a=a.sigBytes;this.clamp();if(b%4)for(var e=0;e>>2]|=(d[e>>>2]>>>24-8*(e%4)&255)<<24-8*((b+e)%4);else if(65535>>2]=d[e>>>2];else c.push.apply(c,d);this.sigBytes+=a;return this},clamp:function(){var a=this.words,c=this.sigBytes;a[c>>>2]&=4294967295<<\n32-8*(c%4);a.length=g.ceil(c/4)},clone:function(){var a=m.clone.call(this);a.words=this.words.slice(0);return a},random:function(a){for(var c=[],d=0;d>>2]>>>24-8*(b%4)&255;d.push((e>>>4).toString(16));d.push((e&15).toString(16))}return d.join(\"\")},parse:function(a){for(var c=a.length,d=[],b=0;b>>3]|=parseInt(a.substr(b,\n2),16)<<24-4*(b%8);return new q.init(d,c/2)}},n=t.Latin1={stringify:function(a){var c=a.words;a=a.sigBytes;for(var d=[],b=0;b>>2]>>>24-8*(b%4)&255));return d.join(\"\")},parse:function(a){for(var c=a.length,d=[],b=0;b>>2]|=(a.charCodeAt(b)&255)<<24-8*(b%4);return new q.init(d,c)}},j=t.Utf8={stringify:function(a){try{return decodeURIComponent(escape(n.stringify(a)))}catch(c){throw Error(\"Malformed UTF-8 data\");}},parse:function(a){return n.parse(unescape(encodeURIComponent(a)))}},\nw=k.BufferedBlockAlgorithm=m.extend({reset:function(){this._data=new q.init;this._nDataBytes=0},_append:function(a){\"string\"==typeof a&&(a=j.parse(a));this._data.concat(a);this._nDataBytes+=a.sigBytes},_process:function(a){var c=this._data,d=c.words,b=c.sigBytes,e=this.blockSize,f=b/(4*e),f=a?g.ceil(f):g.max((f|0)-this._minBufferSize,0);a=f*e;b=g.min(4*a,b);if(a){for(var u=0;un;){var j;a:{j=s;for(var w=g.sqrt(j),v=2;v<=w;v++)if(!(j%v)){j=!1;break a}j=!0}j&&(8>n&&(m[n]=t(g.pow(s,0.5))),q[n]=t(g.pow(s,1/3)),n++);s++}var a=[],f=f.SHA256=h.extend({_doReset:function(){this._hash=new k.init(m.slice(0))},_doProcessBlock:function(c,d){for(var b=this._hash.words,e=b[0],f=b[1],g=b[2],k=b[3],h=b[4],l=b[5],m=b[6],n=b[7],p=0;64>p;p++){if(16>p)a[p]=\nc[d+p]|0;else{var j=a[p-15],r=a[p-2];a[p]=((j<<25|j>>>7)^(j<<14|j>>>18)^j>>>3)+a[p-7]+((r<<15|r>>>17)^(r<<13|r>>>19)^r>>>10)+a[p-16]}j=n+((h<<26|h>>>6)^(h<<21|h>>>11)^(h<<7|h>>>25))+(h&l^~h&m)+q[p]+a[p];r=((e<<30|e>>>2)^(e<<19|e>>>13)^(e<<10|e>>>22))+(e&f^e&g^f&g);n=m;m=l;l=h;h=k+j|0;k=g;g=f;f=e;e=j+r|0}b[0]=b[0]+e|0;b[1]=b[1]+f|0;b[2]=b[2]+g|0;b[3]=b[3]+k|0;b[4]=b[4]+h|0;b[5]=b[5]+l|0;b[6]=b[6]+m|0;b[7]=b[7]+n|0},_doFinalize:function(){var a=this._data,d=a.words,b=8*this._nDataBytes,e=8*a.sigBytes;\nd[e>>>5]|=128<<24-e%32;d[(e+64>>>9<<4)+14]=g.floor(b/4294967296);d[(e+64>>>9<<4)+15]=b;a.sigBytes=4*d.length;this._process();return this._hash},clone:function(){var a=h.clone.call(this);a._hash=this._hash.clone();return a}});l.SHA256=h._createHelper(f);l.HmacSHA256=h._createHmacHelper(f)})(Math);\n(function(){var 
g=CryptoJS,l=g.lib.WordArray,f=g.algo,k=f.SHA256,f=f.SHA224=k.extend({_doReset:function(){this._hash=new l.init([3238371032,914150663,812702999,4144912697,4290775857,1750603025,1694076839,3204075428])},_doFinalize:function(){var f=k._doFinalize.call(this);f.sigBytes-=4;return f}});g.SHA224=k._createHelper(f);g.HmacSHA224=k._createHmacHelper(f)})();\n"], "io": [".py", "import builtins\n\nopen=builtins.open\n\n\nSEEK_SET=0\nSEEK_CUR=1\nSEEK_END=2\n\nr\"\"\"File-like objects that read from or write to a string buffer.\n\nThis implements (nearly) all stdio methods.\n\nf = StringIO() # ready for writing\nf = StringIO(buf) # ready for reading\nf.close() # explicitly release resources held\nflag = f.isatty() # always false\npos = f.tell() # get current position\nf.seek(pos) # set current position\nf.seek(pos, mode) # mode 0: absolute; 1: relative; 2: relative to EOF\nbuf = f.read() # read until EOF\nbuf = f.read(n) # read up to n bytes\nbuf = f.readline() # read until end of line ('\\n') or EOF\nlist = f.readlines()# list of f.readline() results until EOF\nf.truncate([size]) # truncate file at to at most size (default: current pos)\nf.write(buf) # write at current position\nf.writelines(list) # for line in list: f.write(line)\nf.getvalue() # return whole file's contents as a string\n\nNotes:\n- Using a real file is often faster (but less convenient).\n- There's also a much faster implementation in C, called cStringIO, but\n it's not subclassable.\n- fileno() is left unimplemented so that code which uses it triggers\n an exception early.\n- Seeking far beyond EOF and then writing will insert real null\n bytes that occupy space in the buffer.\n- There's a simple test set (see end of this file).\n\"\"\"\ntry :\n from errno import EINVAL\nexcept ImportError:\n EINVAL=22\n \n__all__=[\"StringIO\"]\n\ndef _complain_ifclosed(closed):\n if closed:\n raise ValueError(\"I/O operation on closed file\")\n \nclass StringIO:\n ''\n\n\n\n\n\n\n\n\n\n \n def __init__(self,buf=''):\n self.buf=buf\n self.len=len(buf)\n self.buflist=[]\n self.pos=0\n self.closed=False\n self.softspace=0\n \n def __iter__(self):\n return self\n \n def next(self):\n ''\n\n\n\n\n \n _complain_ifclosed(self.closed)\n r=self.readline()\n if not r:\n raise StopIteration\n return r\n \n def close(self):\n ''\n \n if not self.closed:\n self.closed=True\n del self.buf,self.pos\n \n def isatty(self):\n ''\n\n \n _complain_ifclosed(self.closed)\n return False\n \n def seek(self,pos,mode=0):\n ''\n\n\n\n\n\n\n \n _complain_ifclosed(self.closed)\n if self.buflist:\n self.buf +=''.join(self.buflist)\n self.buflist=[]\n if mode ==1:\n pos +=self.pos\n elif mode ==2:\n pos +=self.len\n self.pos=max(0,pos)\n \n def tell(self):\n ''\n _complain_ifclosed(self.closed)\n return self.pos\n \n def read(self,n=-1):\n ''\n\n\n\n\n\n \n _complain_ifclosed(self.closed)\n if self.buflist:\n self.buf +=''.join(self.buflist)\n self.buflist=[]\n if n is None or n <0:\n newpos=self.len\n else :\n newpos=min(self.pos+n,self.len)\n r=self.buf[self.pos:newpos]\n self.pos=newpos\n return r\n \n def readline(self,length=None ):\n ''\n\n\n\n\n\n\n\n\n\n\n \n _complain_ifclosed(self.closed)\n if self.buflist:\n self.buf +=''.join(self.buflist)\n self.buflist=[]\n i=self.buf.find('\\n',self.pos)\n if i <0:\n newpos=self.len\n else :\n newpos=i+1\n if length is not None and length >=0:\n if self.pos+length slen:\n self.buflist.append('\\0'*(spos -slen))\n slen=spos\n newpos=spos+len(s)\n if spos slen:\n slen=newpos\n else :\n self.buflist.append(s)\n slen=newpos\n 
self.len=slen\n self.pos=newpos\n \n def writelines(self,iterable):\n ''\n\n\n\n\n\n \n write=self.write\n for line in iterable:\n write(line)\n \n def flush(self):\n ''\n \n _complain_ifclosed(self.closed)\n \n def getvalue(self):\n ''\n\n\n\n\n\n\n\n\n \n _complain_ifclosed(self.closed)\n if self.buflist:\n self.buf +=''.join(self.buflist)\n self.buflist=[]\n return self.buf\n \n \nTextIOWrapper=StringIO\n\n\nclass RawIOBase:\n\n def read(self,n=-1):\n pass\n def readall(self):\n pass\n def readinto(self,b):\n pass\n def write(self,b):\n pass\n \n \nBufferedIOBase=RawIOBase\nBufferedReader=RawIOBase\nBytesIO=StringIO\n\n\n"], "sysconfig": [".py", "''\n\n\n\n\nvariables={'TANH_PRESERVES_ZERO_SIGN':0,'WITH_DOC_STRINGS':0}\n\ndef get_config_var(var):\n if var in variables:\n return variables[var]\n \n raise NotImplementedError(\"sysconfig.py:get_config_var: variable '%s' does not exist\"%variable)\n"], "xml.sax.saxutils": [".py", "''\n\n\n\n\nimport os,urllib.parse,urllib.request\nimport io\nfrom .import handler\nfrom .import xmlreader\n\ndef __dict_replace(s,d):\n ''\n for key,value in d.items():\n s=s.replace(key,value)\n return s\n \ndef escape(data,entities={}):\n ''\n\n\n\n\n \n \n \n data=data.replace(\"&\",\"&\")\n data=data.replace(\">\",\">\")\n data=data.replace(\"<\",\"<\")\n if entities:\n data=__dict_replace(data,entities)\n return data\n \ndef unescape(data,entities={}):\n ''\n\n\n\n\n \n data=data.replace(\"<\",\"<\")\n data=data.replace(\">\",\">\")\n if entities:\n data=__dict_replace(data,entities)\n \n return data.replace(\"&\",\"&\")\n \ndef quoteattr(data,entities={}):\n ''\n\n\n\n\n\n\n\n\n \n entities=entities.copy()\n entities.update({'\\n':' ','\\r':' ','\\t':' '})\n data=escape(data,entities)\n if'\"'in data:\n if\"'\"in data:\n data='\"%s\"'%data.replace('\"',\""\")\n else :\n data=\"'%s'\"%data\n else :\n data='\"%s\"'%data\n return data\n \n \ndef _gettextwriter(out,encoding):\n if out is None :\n import sys\n return sys.stdout\n \n if isinstance(out,io.TextIOBase):\n \n return out\n \n \n if isinstance(out,io.RawIOBase):\n \n \n class _wrapper:\n __class__=out.__class__\n def __getattr__(self,name):\n return getattr(out,name)\n buffer=_wrapper()\n buffer.close=lambda :None\n else :\n \n \n buffer=io.BufferedIOBase()\n buffer.writable=lambda :True\n buffer.write=out.write\n try :\n \n \n buffer.seekable=out.seekable\n buffer.tell=out.tell\n except AttributeError:\n pass\n return io.TextIOWrapper(buffer,encoding=encoding,\n errors='xmlcharrefreplace',\n newline='\\n',\n write_through=True )\n \nclass XMLGenerator(handler.ContentHandler):\n\n def __init__(self,out=None ,encoding=\"iso-8859-1\",short_empty_elements=False ):\n handler.ContentHandler.__init__(self)\n out=_gettextwriter(out,encoding)\n self._write=out.write\n self._flush=out.flush\n self._ns_contexts=[{}]\n self._current_context=self._ns_contexts[-1]\n self._undeclared_ns_maps=[]\n self._encoding=encoding\n self._short_empty_elements=short_empty_elements\n self._pending_start_element=False\n \n def _qname(self,name):\n ''\n if name[0]:\n \n \n \n \n if'http://www.w3.org/XML/1998/namespace'==name[0]:\n return'xml:'+name[1]\n \n prefix=self._current_context[name[0]]\n if prefix:\n \n return prefix+\":\"+name[1]\n \n return name[1]\n \n def _finish_pending_start_element(self,endElement=False ):\n if self._pending_start_element:\n self._write('>')\n self._pending_start_element=False\n \n \n \n def startDocument(self):\n self._write('\\n'%\n self._encoding)\n \n def endDocument(self):\n self._flush()\n \n 
def startPrefixMapping(self,prefix,uri):\n self._ns_contexts.append(self._current_context.copy())\n self._current_context[uri]=prefix\n self._undeclared_ns_maps.append((prefix,uri))\n \n def endPrefixMapping(self,prefix):\n self._current_context=self._ns_contexts[-1]\n del self._ns_contexts[-1]\n \n def startElement(self,name,attrs):\n self._finish_pending_start_element()\n self._write('<'+name)\n for (name,value)in attrs.items():\n self._write(' %s=%s'%(name,quoteattr(value)))\n if self._short_empty_elements:\n self._pending_start_element=True\n else :\n self._write(\">\")\n \n def endElement(self,name):\n if self._pending_start_element:\n self._write('/>')\n self._pending_start_element=False\n else :\n self._write(''%name)\n \n def startElementNS(self,name,qname,attrs):\n self._finish_pending_start_element()\n self._write('<'+self._qname(name))\n \n for prefix,uri in self._undeclared_ns_maps:\n if prefix:\n self._write(' xmlns:%s=\"%s\"'%(prefix,uri))\n else :\n self._write(' xmlns=\"%s\"'%uri)\n self._undeclared_ns_maps=[]\n \n for (name,value)in attrs.items():\n self._write(' %s=%s'%(self._qname(name),quoteattr(value)))\n if self._short_empty_elements:\n self._pending_start_element=True\n else :\n self._write(\">\")\n \n def endElementNS(self,name,qname):\n if self._pending_start_element:\n self._write('/>')\n self._pending_start_element=False\n else :\n self._write(''%self._qname(name))\n \n def characters(self,content):\n if content:\n self._finish_pending_start_element()\n self._write(escape(content))\n \n def ignorableWhitespace(self,content):\n if content:\n self._finish_pending_start_element()\n self._write(content)\n \n def processingInstruction(self,target,data):\n self._finish_pending_start_element()\n self._write(''%(target,data))\n \n \nclass XMLFilterBase(xmlreader.XMLReader):\n ''\n\n\n\n\n \n \n def __init__(self,parent=None ):\n xmlreader.XMLReader.__init__(self)\n self._parent=parent\n \n \n \n def error(self,exception):\n self._err_handler.error(exception)\n \n def fatalError(self,exception):\n self._err_handler.fatalError(exception)\n \n def warning(self,exception):\n self._err_handler.warning(exception)\n \n \n \n def setDocumentLocator(self,locator):\n self._cont_handler.setDocumentLocator(locator)\n \n def startDocument(self):\n self._cont_handler.startDocument()\n \n def endDocument(self):\n self._cont_handler.endDocument()\n \n def startPrefixMapping(self,prefix,uri):\n self._cont_handler.startPrefixMapping(prefix,uri)\n \n def endPrefixMapping(self,prefix):\n self._cont_handler.endPrefixMapping(prefix)\n \n def startElement(self,name,attrs):\n self._cont_handler.startElement(name,attrs)\n \n def endElement(self,name):\n self._cont_handler.endElement(name)\n \n def startElementNS(self,name,qname,attrs):\n self._cont_handler.startElementNS(name,qname,attrs)\n \n def endElementNS(self,name,qname):\n self._cont_handler.endElementNS(name,qname)\n \n def characters(self,content):\n self._cont_handler.characters(content)\n \n def ignorableWhitespace(self,chars):\n self._cont_handler.ignorableWhitespace(chars)\n \n def processingInstruction(self,target,data):\n self._cont_handler.processingInstruction(target,data)\n \n def skippedEntity(self,name):\n self._cont_handler.skippedEntity(name)\n \n \n \n def notationDecl(self,name,publicId,systemId):\n self._dtd_handler.notationDecl(name,publicId,systemId)\n \n def unparsedEntityDecl(self,name,publicId,systemId,ndata):\n self._dtd_handler.unparsedEntityDecl(name,publicId,systemId,ndata)\n \n \n \n def 
resolveEntity(self,publicId,systemId):\n return self._ent_handler.resolveEntity(publicId,systemId)\n \n \n \n def parse(self,source):\n self._parent.setContentHandler(self)\n self._parent.setErrorHandler(self)\n self._parent.setEntityResolver(self)\n self._parent.setDTDHandler(self)\n self._parent.parse(source)\n \n def setLocale(self,locale):\n self._parent.setLocale(locale)\n \n def getFeature(self,name):\n return self._parent.getFeature(name)\n \n def setFeature(self,name,state):\n self._parent.setFeature(name,state)\n \n def getProperty(self,name):\n return self._parent.getProperty(name)\n \n def setProperty(self,name,value):\n self._parent.setProperty(name,value)\n \n \n \n def getParent(self):\n return self._parent\n \n def setParent(self,parent):\n self._parent=parent\n \n \n \ndef prepare_input_source(source,base=\"\"):\n ''\n \n \n if isinstance(source,str):\n source=xmlreader.InputSource(source)\n elif hasattr(source,\"read\"):\n f=source\n source=xmlreader.InputSource()\n source.setByteStream(f)\n if hasattr(f,\"name\"):\n source.setSystemId(f.name)\n \n if source.getByteStream()is None :\n sysid=source.getSystemId()\n basehead=os.path.dirname(os.path.normpath(base))\n sysidfilename=os.path.join(basehead,sysid)\n if os.path.isfile(sysidfilename):\n source.setSystemId(sysidfilename)\n f=open(sysidfilename,\"rb\")\n else :\n source.setSystemId(urllib.parse.urljoin(base,sysid))\n f=urllib.request.urlopen(source.getSystemId())\n \n source.setByteStream(f)\n \n return source\n"], "browser.object_storage": [".py", "import pickle\n\nclass __UnProvided():\n pass\n \n \nclass ObjectStorage():\n\n def __init__(self,storage):\n self.storage=storage\n \n def __delitem__(self,key):\n del self.storage[pickle.dumps(key)]\n \n def __getitem__(self,key):\n return pickle.loads(self.storage[pickle.dumps(key)])\n \n def __setitem__(self,key,value):\n self.storage[pickle.dumps(key)]=pickle.dumps(value)\n \n def __contains__(self,key):\n return pickle.dumps(key)in self.storage\n \n def get(self,key,default=None ):\n if pickle.dumps(key)in self.storage:\n return self.storage[pickle.dumps(key)]\n return default\n \n def pop(self,key,default=__UnProvided()):\n if type(default)is __UnProvided or pickle.dumps(key)in self.storage:\n return pickle.loads(self.storage.pop(pickle.dumps(key)))\n return default\n \n def __iter__(self):\n keys=self.keys()\n return keys.__iter__()\n \n def keys(self):\n return [pickle.loads(key)for key in self.storage.keys()]\n \n def values(self):\n return [pickle.loads(val)for val in self.storage.values()]\n \n def items(self):\n return list(zip(self.keys(),self.values()))\n \n def clear(self):\n self.storage.clear()\n \n def __len__(self):\n return len(self.storage)\n"], "xml.dom": [".py", "''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\nclass Node:\n ''\n __slots__=()\n \n \n \n \n \n \n \n \n ELEMENT_NODE=1\n ATTRIBUTE_NODE=2\n TEXT_NODE=3\n CDATA_SECTION_NODE=4\n ENTITY_REFERENCE_NODE=5\n ENTITY_NODE=6\n PROCESSING_INSTRUCTION_NODE=7\n COMMENT_NODE=8\n DOCUMENT_NODE=9\n DOCUMENT_TYPE_NODE=10\n DOCUMENT_FRAGMENT_NODE=11\n NOTATION_NODE=12\n \n \n \nINDEX_SIZE_ERR=1\nDOMSTRING_SIZE_ERR=2\nHIERARCHY_REQUEST_ERR=3\nWRONG_DOCUMENT_ERR=4\nINVALID_CHARACTER_ERR=5\nNO_DATA_ALLOWED_ERR=6\nNO_MODIFICATION_ALLOWED_ERR=7\nNOT_FOUND_ERR=8\nNOT_SUPPORTED_ERR=9\nINUSE_ATTRIBUTE_ERR=10\nINVALID_STATE_ERR=11\nSYNTAX_ERR=12\nINVALID_MODIFICATION_ERR=13\nNAMESPACE_ERR=14\nINVALID_ACCESS_ERR=15\nVALIDATION_ERR=16\n\n\nclass DOMException(Exception):\n ''\n \n \n def __init__(self,*args,**kw):\n if 
self.__class__ is DOMException:\n raise RuntimeError(\n \"DOMException should not be instantiated directly\")\n Exception.__init__(self,*args,**kw)\n \n def _get_code(self):\n return self.code\n \n \nclass IndexSizeErr(DOMException):\n code=INDEX_SIZE_ERR\n \nclass DomstringSizeErr(DOMException):\n code=DOMSTRING_SIZE_ERR\n \nclass HierarchyRequestErr(DOMException):\n code=HIERARCHY_REQUEST_ERR\n \nclass WrongDocumentErr(DOMException):\n code=WRONG_DOCUMENT_ERR\n \nclass InvalidCharacterErr(DOMException):\n code=INVALID_CHARACTER_ERR\n \nclass NoDataAllowedErr(DOMException):\n code=NO_DATA_ALLOWED_ERR\n \nclass NoModificationAllowedErr(DOMException):\n code=NO_MODIFICATION_ALLOWED_ERR\n \nclass NotFoundErr(DOMException):\n code=NOT_FOUND_ERR\n \nclass NotSupportedErr(DOMException):\n code=NOT_SUPPORTED_ERR\n \nclass InuseAttributeErr(DOMException):\n code=INUSE_ATTRIBUTE_ERR\n \nclass InvalidStateErr(DOMException):\n code=INVALID_STATE_ERR\n \nclass SyntaxErr(DOMException):\n code=SYNTAX_ERR\n \nclass InvalidModificationErr(DOMException):\n code=INVALID_MODIFICATION_ERR\n \nclass NamespaceErr(DOMException):\n code=NAMESPACE_ERR\n \nclass InvalidAccessErr(DOMException):\n code=INVALID_ACCESS_ERR\n \nclass ValidationErr(DOMException):\n code=VALIDATION_ERR\n \nclass UserDataHandler:\n ''\n \n \n \n NODE_CLONED=1\n NODE_IMPORTED=2\n NODE_DELETED=3\n NODE_RENAMED=4\n \nXML_NAMESPACE=\"http://www.w3.org/XML/1998/namespace\"\nXMLNS_NAMESPACE=\"http://www.w3.org/2000/xmlns/\"\nXHTML_NAMESPACE=\"http://www.w3.org/1999/xhtml\"\nEMPTY_NAMESPACE=None\nEMPTY_PREFIX=None\n\nfrom .domreg import getDOMImplementation,registerDOMImplementation\n", 1], "encodings.cp775": [".py", "''\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_map)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False ):\n return codecs.charmap_encode(input,self.errors,encoding_map)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False ):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='cp775',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n 
\ndecoding_map=codecs.make_identity_dict(range(256))\ndecoding_map.update({\n0x0080:0x0106,\n0x0081:0x00fc,\n0x0082:0x00e9,\n0x0083:0x0101,\n0x0084:0x00e4,\n0x0085:0x0123,\n0x0086:0x00e5,\n0x0087:0x0107,\n0x0088:0x0142,\n0x0089:0x0113,\n0x008a:0x0156,\n0x008b:0x0157,\n0x008c:0x012b,\n0x008d:0x0179,\n0x008e:0x00c4,\n0x008f:0x00c5,\n0x0090:0x00c9,\n0x0091:0x00e6,\n0x0092:0x00c6,\n0x0093:0x014d,\n0x0094:0x00f6,\n0x0095:0x0122,\n0x0096:0x00a2,\n0x0097:0x015a,\n0x0098:0x015b,\n0x0099:0x00d6,\n0x009a:0x00dc,\n0x009b:0x00f8,\n0x009c:0x00a3,\n0x009d:0x00d8,\n0x009e:0x00d7,\n0x009f:0x00a4,\n0x00a0:0x0100,\n0x00a1:0x012a,\n0x00a2:0x00f3,\n0x00a3:0x017b,\n0x00a4:0x017c,\n0x00a5:0x017a,\n0x00a6:0x201d,\n0x00a7:0x00a6,\n0x00a8:0x00a9,\n0x00a9:0x00ae,\n0x00aa:0x00ac,\n0x00ab:0x00bd,\n0x00ac:0x00bc,\n0x00ad:0x0141,\n0x00ae:0x00ab,\n0x00af:0x00bb,\n0x00b0:0x2591,\n0x00b1:0x2592,\n0x00b2:0x2593,\n0x00b3:0x2502,\n0x00b4:0x2524,\n0x00b5:0x0104,\n0x00b6:0x010c,\n0x00b7:0x0118,\n0x00b8:0x0116,\n0x00b9:0x2563,\n0x00ba:0x2551,\n0x00bb:0x2557,\n0x00bc:0x255d,\n0x00bd:0x012e,\n0x00be:0x0160,\n0x00bf:0x2510,\n0x00c0:0x2514,\n0x00c1:0x2534,\n0x00c2:0x252c,\n0x00c3:0x251c,\n0x00c4:0x2500,\n0x00c5:0x253c,\n0x00c6:0x0172,\n0x00c7:0x016a,\n0x00c8:0x255a,\n0x00c9:0x2554,\n0x00ca:0x2569,\n0x00cb:0x2566,\n0x00cc:0x2560,\n0x00cd:0x2550,\n0x00ce:0x256c,\n0x00cf:0x017d,\n0x00d0:0x0105,\n0x00d1:0x010d,\n0x00d2:0x0119,\n0x00d3:0x0117,\n0x00d4:0x012f,\n0x00d5:0x0161,\n0x00d6:0x0173,\n0x00d7:0x016b,\n0x00d8:0x017e,\n0x00d9:0x2518,\n0x00da:0x250c,\n0x00db:0x2588,\n0x00dc:0x2584,\n0x00dd:0x258c,\n0x00de:0x2590,\n0x00df:0x2580,\n0x00e0:0x00d3,\n0x00e1:0x00df,\n0x00e2:0x014c,\n0x00e3:0x0143,\n0x00e4:0x00f5,\n0x00e5:0x00d5,\n0x00e6:0x00b5,\n0x00e7:0x0144,\n0x00e8:0x0136,\n0x00e9:0x0137,\n0x00ea:0x013b,\n0x00eb:0x013c,\n0x00ec:0x0146,\n0x00ed:0x0112,\n0x00ee:0x0145,\n0x00ef:0x2019,\n0x00f0:0x00ad,\n0x00f1:0x00b1,\n0x00f2:0x201c,\n0x00f3:0x00be,\n0x00f4:0x00b6,\n0x00f5:0x00a7,\n0x00f6:0x00f7,\n0x00f7:0x201e,\n0x00f8:0x00b0,\n0x00f9:0x2219,\n0x00fa:0x00b7,\n0x00fb:0x00b9,\n0x00fc:0x00b3,\n0x00fd:0x00b2,\n0x00fe:0x25a0,\n0x00ff:0x00a0,\n})\n\n\n\ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' 
'\n'!'\n'\"'\n'#'\n'$'\n'%'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\u0106'\n'\\xfc'\n'\\xe9'\n'\\u0101'\n'\\xe4'\n'\\u0123'\n'\\xe5'\n'\\u0107'\n'\\u0142'\n'\\u0113'\n'\\u0156'\n'\\u0157'\n'\\u012b'\n'\\u0179'\n'\\xc4'\n'\\xc5'\n'\\xc9'\n'\\xe6'\n'\\xc6'\n'\\u014d'\n'\\xf6'\n'\\u0122'\n'\\xa2'\n'\\u015a'\n'\\u015b'\n'\\xd6'\n'\\xdc'\n'\\xf8'\n'\\xa3'\n'\\xd8'\n'\\xd7'\n'\\xa4'\n'\\u0100'\n'\\u012a'\n'\\xf3'\n'\\u017b'\n'\\u017c'\n'\\u017a'\n'\\u201d'\n'\\xa6'\n'\\xa9'\n'\\xae'\n'\\xac'\n'\\xbd'\n'\\xbc'\n'\\u0141'\n'\\xab'\n'\\xbb'\n'\\u2591'\n'\\u2592'\n'\\u2593'\n'\\u2502'\n'\\u2524'\n'\\u0104'\n'\\u010c'\n'\\u0118'\n'\\u0116'\n'\\u2563'\n'\\u2551'\n'\\u2557'\n'\\u255d'\n'\\u012e'\n'\\u0160'\n'\\u2510'\n'\\u2514'\n'\\u2534'\n'\\u252c'\n'\\u251c'\n'\\u2500'\n'\\u253c'\n'\\u0172'\n'\\u016a'\n'\\u255a'\n'\\u2554'\n'\\u2569'\n'\\u2566'\n'\\u2560'\n'\\u2550'\n'\\u256c'\n'\\u017d'\n'\\u0105'\n'\\u010d'\n'\\u0119'\n'\\u0117'\n'\\u012f'\n'\\u0161'\n'\\u0173'\n'\\u016b'\n'\\u017e'\n'\\u2518'\n'\\u250c'\n'\\u2588'\n'\\u2584'\n'\\u258c'\n'\\u2590'\n'\\u2580'\n'\\xd3'\n'\\xdf'\n'\\u014c'\n'\\u0143'\n'\\xf5'\n'\\xd5'\n'\\xb5'\n'\\u0144'\n'\\u0136'\n'\\u0137'\n'\\u013b'\n'\\u013c'\n'\\u0146'\n'\\u0112'\n'\\u0145'\n'\\u2019'\n'\\xad'\n'\\xb1'\n'\\u201c'\n'\\xbe'\n'\\xb6'\n'\\xa7'\n'\\xf7'\n'\\u201e'\n'\\xb0'\n'\\u2219'\n'\\xb7'\n'\\xb9'\n'\\xb3'\n'\\xb2'\n'\\u25a0'\n'\\xa0'\n)\n\n\n\nencoding_map={\n0x0000:0x0000,\n0x0001:0x0001,\n0x0002:0x0002,\n0x0003:0x0003,\n0x0004:0x0004,\n0x0005:0x0005,\n0x0006:0x0006,\n0x0007:0x0007,\n0x0008:0x0008,\n0x0009:0x0009,\n0x000a:0x000a,\n0x000b:0x000b,\n0x000c:0x000c,\n0x000d:0x000d,\n0x000e:0x000e,\n0x000f:0x000f,\n0x0010:0x0010,\n0x0011:0x0011,\n0x0012:0x0012,\n0x0013:0x0013,\n0x0014:0x0014,\n0x0015:0x0015,\n0x0016:0x0016,\n0x0017:0x0017,\n0x0018:0x0018,\n0x0019:0x0019,\n0x001a:0x001a,\n0x001b:0x001b,\n0x001c:0x001c,\n0x001d:0x001d,\n0x001e:0x001e,\n0x001f:0x001f,\n0x0020:0x0020,\n0x0021:0x0021,\n0x0022:0x0022,\n0x0023:0x0023,\n0x0024:0x0024,\n0x0025:0x0025,\n0x0026:0x0026,\n0x0027:0x0027,\n0x0028:0x0028,\n0x0029:0x0029,\n0x002a:0x002a,\n0x002b:0x002b,\n0x002c:0x002c,\n0x002d:0x002d,\n0x002e:0x002e,\n0x002f:0x002f,\n0x0030:0x0030,\n0x0031:0x0031,\n0x0032:0x0032,\n0x0033:0x0033,\n0x0034:0x0034,\n0x0035:0x0035,\n0x0036:0x0036,\n0x0037:0x0037,\n0x0038:0x0038,\n0x0039:0x0039,\n0x003a:0x003a,\n0x003b:0x003b,\n0x003c:0x003c,\n0x003d:0x003d,\n0x003e:0x003e,\n0x003f:0x003f,\n0x0040:0x0040,\n0x0041:0x0041,\n0x0042:0x0042,\n0x0043:0x0043,\n0x0044:0x0044,\n0x0045:0x0045,\n0x0046:0x0046,\n0x0047:0x0047,\n0x0048:0x0048,\n0x0049:0x0049,\n0x004a:0x004a,\n0x004b:0x004b,\n0x004c:0x004c,\n0x004d:0x004d,\n0x004e:0x004e,\n0x004f:0x004f,\n0x0050:0x0050,\n0x0051:0x0051,\n0x0052:0x0052,\n0x0053:0x0053,\n0x0054:0x0054,\n0x0055:0x0055,\n0x0056:0x0056,\n0x0057:0x0057,\n0x0058:0x0058,\n0x0059:0x0059,\n0x005a:0x005a,\n0x005b:0x005b,\n0x005c:0x005c,\n0x005d:0x005d,\n0x005e:0x005e,\n0x005f:0x005f,\n0x0060:0x0060,\n0x0061:0x0061,\n0x0062:0x0062,\n0x0063:0x0063,\n0x0064:0x0064,\n0x0065:0x0065,\n0x0066:0x0066,\n0x0067:0x0067,\n0x0068:0x0068,\n0x0069:0x0069,\n0x006a:0x006a,\n0x006b:0x0
06b,\n0x006c:0x006c,\n0x006d:0x006d,\n0x006e:0x006e,\n0x006f:0x006f,\n0x0070:0x0070,\n0x0071:0x0071,\n0x0072:0x0072,\n0x0073:0x0073,\n0x0074:0x0074,\n0x0075:0x0075,\n0x0076:0x0076,\n0x0077:0x0077,\n0x0078:0x0078,\n0x0079:0x0079,\n0x007a:0x007a,\n0x007b:0x007b,\n0x007c:0x007c,\n0x007d:0x007d,\n0x007e:0x007e,\n0x007f:0x007f,\n0x00a0:0x00ff,\n0x00a2:0x0096,\n0x00a3:0x009c,\n0x00a4:0x009f,\n0x00a6:0x00a7,\n0x00a7:0x00f5,\n0x00a9:0x00a8,\n0x00ab:0x00ae,\n0x00ac:0x00aa,\n0x00ad:0x00f0,\n0x00ae:0x00a9,\n0x00b0:0x00f8,\n0x00b1:0x00f1,\n0x00b2:0x00fd,\n0x00b3:0x00fc,\n0x00b5:0x00e6,\n0x00b6:0x00f4,\n0x00b7:0x00fa,\n0x00b9:0x00fb,\n0x00bb:0x00af,\n0x00bc:0x00ac,\n0x00bd:0x00ab,\n0x00be:0x00f3,\n0x00c4:0x008e,\n0x00c5:0x008f,\n0x00c6:0x0092,\n0x00c9:0x0090,\n0x00d3:0x00e0,\n0x00d5:0x00e5,\n0x00d6:0x0099,\n0x00d7:0x009e,\n0x00d8:0x009d,\n0x00dc:0x009a,\n0x00df:0x00e1,\n0x00e4:0x0084,\n0x00e5:0x0086,\n0x00e6:0x0091,\n0x00e9:0x0082,\n0x00f3:0x00a2,\n0x00f5:0x00e4,\n0x00f6:0x0094,\n0x00f7:0x00f6,\n0x00f8:0x009b,\n0x00fc:0x0081,\n0x0100:0x00a0,\n0x0101:0x0083,\n0x0104:0x00b5,\n0x0105:0x00d0,\n0x0106:0x0080,\n0x0107:0x0087,\n0x010c:0x00b6,\n0x010d:0x00d1,\n0x0112:0x00ed,\n0x0113:0x0089,\n0x0116:0x00b8,\n0x0117:0x00d3,\n0x0118:0x00b7,\n0x0119:0x00d2,\n0x0122:0x0095,\n0x0123:0x0085,\n0x012a:0x00a1,\n0x012b:0x008c,\n0x012e:0x00bd,\n0x012f:0x00d4,\n0x0136:0x00e8,\n0x0137:0x00e9,\n0x013b:0x00ea,\n0x013c:0x00eb,\n0x0141:0x00ad,\n0x0142:0x0088,\n0x0143:0x00e3,\n0x0144:0x00e7,\n0x0145:0x00ee,\n0x0146:0x00ec,\n0x014c:0x00e2,\n0x014d:0x0093,\n0x0156:0x008a,\n0x0157:0x008b,\n0x015a:0x0097,\n0x015b:0x0098,\n0x0160:0x00be,\n0x0161:0x00d5,\n0x016a:0x00c7,\n0x016b:0x00d7,\n0x0172:0x00c6,\n0x0173:0x00d6,\n0x0179:0x008d,\n0x017a:0x00a5,\n0x017b:0x00a3,\n0x017c:0x00a4,\n0x017d:0x00cf,\n0x017e:0x00d8,\n0x2019:0x00ef,\n0x201c:0x00f2,\n0x201d:0x00a6,\n0x201e:0x00f7,\n0x2219:0x00f9,\n0x2500:0x00c4,\n0x2502:0x00b3,\n0x250c:0x00da,\n0x2510:0x00bf,\n0x2514:0x00c0,\n0x2518:0x00d9,\n0x251c:0x00c3,\n0x2524:0x00b4,\n0x252c:0x00c2,\n0x2534:0x00c1,\n0x253c:0x00c5,\n0x2550:0x00cd,\n0x2551:0x00ba,\n0x2554:0x00c9,\n0x2557:0x00bb,\n0x255a:0x00c8,\n0x255d:0x00bc,\n0x2560:0x00cc,\n0x2563:0x00b9,\n0x2566:0x00cb,\n0x2569:0x00ca,\n0x256c:0x00ce,\n0x2580:0x00df,\n0x2584:0x00dc,\n0x2588:0x00db,\n0x258c:0x00dd,\n0x2590:0x00de,\n0x2591:0x00b0,\n0x2592:0x00b1,\n0x2593:0x00b2,\n0x25a0:0x00fe,\n}\n"], "dis": [".js", "var $module=(function($B){\n\nvar mod = {\n dis:function(src){\n $B.$py_module_path['__main__'] = $B.brython_path\n return __BRYTHON__.py2js(src,'__main__','__main__','__builtins__').to_js()\n }\n}\nreturn mod\n\n})(__BRYTHON__)"], "sre_parse": [".py", "\n\n\n\n\n\n\n\n\n\n\"\"\"Internal support module for sre\"\"\"\n\n\n\nimport sys\n\nfrom sre_constants import *\nfrom _sre import MAXREPEAT\n\nSPECIAL_CHARS=\".\\\\[{()*+?^$|\"\nREPEAT_CHARS=\"*+?{\"\n\nDIGITS=set(\"0123456789\")\n\nOCTDIGITS=set(\"01234567\")\nHEXDIGITS=set(\"0123456789abcdefABCDEF\")\n\nWHITESPACE=set(\" 
\\t\\n\\r\\v\\f\")\n\nESCAPES={\nr\"\\a\":(LITERAL,ord(\"\\a\")),\nr\"\\b\":(LITERAL,ord(\"\\b\")),\nr\"\\f\":(LITERAL,ord(\"\\f\")),\nr\"\\n\":(LITERAL,ord(\"\\n\")),\nr\"\\r\":(LITERAL,ord(\"\\r\")),\nr\"\\t\":(LITERAL,ord(\"\\t\")),\nr\"\\v\":(LITERAL,ord(\"\\v\")),\nr\"\\\\\":(LITERAL,ord(\"\\\\\"))\n}\n\nCATEGORIES={\nr\"\\A\":(AT,AT_BEGINNING_STRING),\nr\"\\b\":(AT,AT_BOUNDARY),\nr\"\\B\":(AT,AT_NON_BOUNDARY),\nr\"\\d\":(IN,[(CATEGORY,CATEGORY_DIGIT)]),\nr\"\\D\":(IN,[(CATEGORY,CATEGORY_NOT_DIGIT)]),\nr\"\\s\":(IN,[(CATEGORY,CATEGORY_SPACE)]),\nr\"\\S\":(IN,[(CATEGORY,CATEGORY_NOT_SPACE)]),\nr\"\\w\":(IN,[(CATEGORY,CATEGORY_WORD)]),\nr\"\\W\":(IN,[(CATEGORY,CATEGORY_NOT_WORD)]),\nr\"\\Z\":(AT,AT_END_STRING),\n}\n\nFLAGS={\n\n\"i\":SRE_FLAG_IGNORECASE,\n\"L\":SRE_FLAG_LOCALE,\n\"m\":SRE_FLAG_MULTILINE,\n\"s\":SRE_FLAG_DOTALL,\n\"x\":SRE_FLAG_VERBOSE,\n\n\"a\":SRE_FLAG_ASCII,\n\"t\":SRE_FLAG_TEMPLATE,\n\"u\":SRE_FLAG_UNICODE,\n}\n\nclass Pattern:\n\n def __init__(self):\n self.flags=0\n self.open=[]\n self.groups=1\n self.groupdict={}\n def opengroup(self,name=None ):\n gid=self.groups\n self.groups=gid+1\n if name is not None :\n ogid=self.groupdict.get(name,None )\n if ogid is not None :\n raise error(\"redefinition of group name %s as group %d; \"\n \"was group %d\"%(repr(name),gid,ogid))\n self.groupdict[name]=gid\n self.open.append(gid)\n return gid\n def closegroup(self,gid):\n self.open.remove(gid)\n def checkgroup(self,gid):\n return gid 0:\n print(level *\" \"+\"or\")\n a.dump(level+1);nl=1\n i=i+1\n elif isinstance(av,seqtypes):\n for a in av:\n if isinstance(a,SubPattern):\n if not nl:print()\n a.dump(level+1);nl=1\n else :\n print(a,end=' ');nl=0\n else :\n print(av,end=' ');nl=0\n if not nl:print()\n def __repr__(self):\n return repr(self.data)\n def __len__(self):\n return len(self.data)\n def __delitem__(self,index):\n del self.data[index]\n def __getitem__(self,index):\n if isinstance(index,slice):\n return SubPattern(self.pattern,self.data[index])\n return self.data[index]\n def __setitem__(self,index,code):\n self.data[index]=code\n def insert(self,index,code):\n self.data.insert(index,code)\n def append(self,code):\n self.data.append(code)\n def getwidth(self):\n \n if self.width:\n return self.width\n lo=hi=0\n UNITCODES=(ANY,RANGE,IN,LITERAL,NOT_LITERAL,CATEGORY)\n REPEATCODES=(MIN_REPEAT,MAX_REPEAT)\n for op,av in self.data:\n if op is BRANCH:\n i=sys.maxsize\n j=0\n for av in av[1]:\n l,h=av.getwidth()\n i=min(i,l)\n j=max(j,h)\n lo=lo+i\n hi=hi+j\n elif op is CALL:\n i,j=av.getwidth()\n lo=lo+i\n hi=hi+j\n elif op is SUBPATTERN:\n i,j=av[1].getwidth()\n lo=lo+i\n hi=hi+j\n elif op in REPEATCODES:\n i,j=av[2].getwidth()\n lo=lo+int(i)*av[0]\n hi=hi+int(j)*av[1]\n elif op in UNITCODES:\n lo=lo+1\n hi=hi+1\n elif op ==SUCCESS:\n break\n self.width=int(min(lo,sys.maxsize)),int(min(hi,sys.maxsize))\n return self.width\n \nclass Tokenizer:\n def __init__(self,string):\n self.istext=isinstance(string,str)\n self.string=string\n self.index=0\n self.__next()\n def __next(self):\n if self.index >=len(self.string):\n self.next=None\n return\n char=self.string[self.index:self.index+1]\n \n \n if char and not self.istext:\n char=chr(char[0])\n if char ==\"\\\\\":\n try :\n c=self.string[self.index+1]\n except IndexError:\n raise error(\"bogus escape (end of line)\")\n if not self.istext:\n c=chr(c)\n char=char+c\n self.index=self.index+len(char)\n self.next=char\n def match(self,char,skip=1):\n if char ==self.next:\n if skip:\n self.__next()\n return 1\n return 0\n def 
get(self):\n this=self.next\n self.__next()\n return this\n def getwhile(self,n,charset):\n result=''\n for _ in range(n):\n c=self.next\n if c not in charset:\n break\n result +=c\n self.__next()\n return result\n def tell(self):\n return self.index,self.next\n def seek(self,index):\n self.index,self.next=index\n \ndef isident(char):\n return\"a\"<=char <=\"z\"or\"A\"<=char <=\"Z\"or char ==\"_\"\n \ndef isdigit(char):\n return\"0\"<=char <=\"9\"\n \ndef isname(name):\n\n if not isident(name[0]):\n return False\n for char in name[1:]:\n if not isident(char)and not isdigit(char):\n return False\n return True\n \ndef _class_escape(source,escape):\n\n code=ESCAPES.get(escape)\n if code:\n return code\n code=CATEGORIES.get(escape)\n if code and code[0]==IN:\n return code\n try :\n c=escape[1:2]\n if c ==\"x\":\n \n escape +=source.getwhile(2,HEXDIGITS)\n if len(escape)!=4:\n raise ValueError\n return LITERAL,int(escape[2:],16)&0xff\n elif c ==\"u\"and source.istext:\n \n escape +=source.getwhile(4,HEXDIGITS)\n if len(escape)!=6:\n raise ValueError\n return LITERAL,int(escape[2:],16)\n elif c ==\"U\"and source.istext:\n \n escape +=source.getwhile(8,HEXDIGITS)\n if len(escape)!=10:\n raise ValueError\n c=int(escape[2:],16)\n chr(c)\n return LITERAL,c\n elif c in OCTDIGITS:\n \n escape +=source.getwhile(2,OCTDIGITS)\n return LITERAL,int(escape[1:],8)&0xff\n elif c in DIGITS:\n raise ValueError\n if len(escape)==2:\n return LITERAL,ord(escape[1])\n except ValueError:\n pass\n raise error(\"bogus escape: %s\"%repr(escape))\n \ndef _escape(source,escape,state):\n\n code=CATEGORIES.get(escape)\n if code:\n return code\n code=ESCAPES.get(escape)\n if code:\n return code\n try :\n c=escape[1:2]\n if c ==\"x\":\n \n escape +=source.getwhile(2,HEXDIGITS)\n if len(escape)!=4:\n raise ValueError\n return LITERAL,int(escape[2:],16)&0xff\n elif c ==\"u\"and source.istext:\n \n escape +=source.getwhile(4,HEXDIGITS)\n if len(escape)!=6:\n raise ValueError\n return LITERAL,int(escape[2:],16)\n elif c ==\"U\"and source.istext:\n \n escape +=source.getwhile(8,HEXDIGITS)\n if len(escape)!=10:\n raise ValueError\n c=int(escape[2:],16)\n chr(c)\n return LITERAL,c\n elif c ==\"0\":\n \n escape +=source.getwhile(2,OCTDIGITS)\n return LITERAL,int(escape[1:],8)&0xff\n elif c in DIGITS:\n \n if source.next in DIGITS:\n escape=escape+source.get()\n if (escape[1]in OCTDIGITS and escape[2]in OCTDIGITS and\n source.next in OCTDIGITS):\n \n escape=escape+source.get()\n return LITERAL,int(escape[1:],8)&0xff\n \n group=int(escape[1:])\n if group =MAXREPEAT:\n raise OverflowError(\"the repetition number is too large\")\n if hi:\n max=int(hi)\n if max >=MAXREPEAT:\n raise OverflowError(\"the repetition number is too large\")\n if max \":\n break\n name=name+char\n group=1\n if not name:\n raise error(\"missing group name\")\n if not isname(name):\n raise error(\"bad character in group name\")\n elif sourcematch(\"=\"):\n \n name=\"\"\n while 1:\n char=sourceget()\n if char is None :\n raise error(\"unterminated name\")\n if char ==\")\":\n break\n name=name+char\n if not name:\n raise error(\"missing group name\")\n if not isname(name):\n raise error(\"bad character in group name\")\n gid=state.groupdict.get(name)\n if gid is None :\n raise error(\"unknown group name\")\n subpatternappend((GROUPREF,gid))\n continue\n else :\n char=sourceget()\n if char is None :\n raise error(\"unexpected end of pattern\")\n raise error(\"unknown specifier: ?P%s\"%char)\n elif sourcematch(\":\"):\n \n group=2\n elif sourcematch(\"#\"):\n \n 
while 1:\n if source.next is None or source.next ==\")\":\n break\n sourceget()\n if not sourcematch(\")\"):\n raise error(\"unbalanced parenthesis\")\n continue\n elif source.next in ASSERTCHARS:\n \n char=sourceget()\n dir=1\n if char ==\"<\":\n if source.next not in LOOKBEHINDASSERTCHARS:\n raise error(\"syntax error\")\n dir=-1\n char=sourceget()\n p=_parse_sub(source,state)\n if not sourcematch(\")\"):\n raise error(\"unbalanced parenthesis\")\n if char ==\"=\":\n subpatternappend((ASSERT,(dir,p)))\n else :\n subpatternappend((ASSERT_NOT,(dir,p)))\n continue\n elif sourcematch(\"(\"):\n \n condname=\"\"\n while 1:\n char=sourceget()\n if char is None :\n raise error(\"unterminated name\")\n if char ==\")\":\n break\n condname=condname+char\n group=2\n if not condname:\n raise error(\"missing group name\")\n if isname(condname):\n condgroup=state.groupdict.get(condname)\n if condgroup is None :\n raise error(\"unknown group name\")\n else :\n try :\n condgroup=int(condname)\n except ValueError:\n raise error(\"bad character in group name\")\n else :\n \n if not source.next in FLAGS:\n raise error(\"unexpected end of pattern\")\n while source.next in FLAGS:\n state.flags=state.flags |FLAGS[sourceget()]\n if group:\n \n if group ==2:\n \n group=None\n else :\n group=state.opengroup(name)\n if condgroup:\n p=_parse_sub_cond(source,state,condgroup)\n else :\n p=_parse_sub(source,state)\n if not sourcematch(\")\"):\n raise error(\"unbalanced parenthesis\")\n if group is not None :\n state.closegroup(group)\n subpatternappend((SUBPATTERN,(group,p)))\n else :\n while 1:\n char=sourceget()\n if char is None :\n raise error(\"unexpected end of pattern\")\n if char ==\")\":\n break\n raise error(\"unknown extension\")\n \n elif this ==\"^\":\n subpatternappend((AT,AT_BEGINNING))\n \n elif this ==\"$\":\n subpattern.append((AT,AT_END))\n \n elif this and this[0]==\"\\\\\":\n code=_escape(source,this,state)\n subpatternappend(code)\n \n else :\n raise error(\"parser error\")\n \n return subpattern\n \ndef fix_flags(src,flags):\n\n if isinstance(src,str):\n if not flags&SRE_FLAG_ASCII:\n flags |=SRE_FLAG_UNICODE\n elif flags&SRE_FLAG_UNICODE:\n raise ValueError(\"ASCII and UNICODE flags are incompatible\")\n else :\n if flags&SRE_FLAG_UNICODE:\n raise ValueError(\"can't use UNICODE flag with a bytes pattern\")\n return flags\n \ndef parse(str,flags=0,pattern=None ):\n\n source=Tokenizer(str)\n \n if pattern is None :\n pattern=Pattern()\n pattern.flags=flags\n pattern.str=str\n p=_parse_sub(source,pattern,0)\n p.pattern.flags=fix_flags(str,p.pattern.flags)\n \n tail=source.get()\n if tail ==\")\":\n raise error(\"unbalanced parenthesis\")\n elif tail:\n raise error(\"bogus characters at end of regular expression\")\n \n if flags&SRE_FLAG_DEBUG:\n p.dump()\n \n if not (flags&SRE_FLAG_VERBOSE)and p.pattern.flags&SRE_FLAG_VERBOSE:\n \n \n return parse(str,p.pattern.flags)\n \n return p\n \ndef parse_template(source,pattern):\n\n\n s=Tokenizer(source)\n sget=s.get\n p=[]\n a=p.append\n def literal(literal,p=p,pappend=a):\n if p and p[-1][0]is LITERAL:\n p[-1]=LITERAL,p[-1][1]+literal\n else :\n pappend((LITERAL,literal))\n sep=source[:0]\n if isinstance(sep,str):\n makechar=chr\n else :\n makechar=chr\n while 1:\n this=sget()\n if this is None :\n break\n if this and this[0]==\"\\\\\":\n \n c=this[1:2]\n if c ==\"g\":\n name=\"\"\n if s.match(\"<\"):\n while 1:\n char=sget()\n if char is None :\n raise error(\"unterminated group name\")\n if char ==\">\":\n break\n name=name+char\n if not name:\n raise 
error(\"missing group name\")\n try :\n index=int(name)\n if index <0:\n raise error(\"negative group number\")\n except ValueError:\n if not isname(name):\n raise error(\"bad character in group name\")\n try :\n index=pattern.groupindex[name]\n except KeyError:\n raise IndexError(\"unknown group name\")\n a((MARK,index))\n elif c ==\"0\":\n if s.next in OCTDIGITS:\n this=this+sget()\n if s.next in OCTDIGITS:\n this=this+sget()\n literal(makechar(int(this[1:],8)&0xff))\n elif c in DIGITS:\n isoctal=False\n if s.next in DIGITS:\n this=this+sget()\n if (c in OCTDIGITS and this[2]in OCTDIGITS and\n s.next in OCTDIGITS):\n this=this+sget()\n isoctal=True\n literal(makechar(int(this[1:],8)&0xff))\n if not isoctal:\n a((MARK,int(this[1:])))\n else :\n try :\n this=makechar(ESCAPES[this][1])\n except KeyError:\n pass\n literal(this)\n else :\n literal(this)\n \n i=0\n groups=[]\n groupsappend=groups.append\n literals=[None ]*len(p)\n if isinstance(source,str):\n encode=lambda x:x\n else :\n \n \n encode=lambda x:x.encode('latin-1')\n for c,s in p:\n if c is MARK:\n groupsappend((i,s))\n \n else :\n literals[i]=encode(s)\n i=i+1\n return groups,literals\n \ndef expand_template(template,match):\n g=match.group\n sep=match.string[:0]\n groups,literals=template\n literals=literals[:]\n try :\n for index,group in groups:\n literals[index]=s=g(group)\n if s is None :\n raise error(\"unmatched group\")\n except IndexError:\n raise error(\"invalid group reference\")\n return sep.join(literals)\n \n \n"], "formatter": [".py", "''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\nimport sys\n\n\nAS_IS=None\n\n\nclass NullFormatter:\n ''\n\n\n\n\n\n\n\n \n \n def __init__(self,writer=None ):\n if writer is None :\n writer=NullWriter()\n self.writer=writer\n def end_paragraph(self,blankline):pass\n def add_line_break(self):pass\n def add_hor_rule(self,*args,**kw):pass\n def add_label_data(self,format,counter,blankline=None ):pass\n def add_flowing_data(self,data):pass\n def add_literal_data(self,data):pass\n def flush_softspace(self):pass\n def push_alignment(self,align):pass\n def pop_alignment(self):pass\n def push_font(self,x):pass\n def pop_font(self):pass\n def push_margin(self,margin):pass\n def pop_margin(self):pass\n def set_spacing(self,spacing):pass\n def push_style(self,*styles):pass\n def pop_style(self,n=1):pass\n def assert_line_data(self,flag=1):pass\n \n \nclass AbstractFormatter:\n ''\n\n\n\n\n\n \n \n \n \n \n \n \n def __init__(self,writer):\n self.writer=writer\n self.align=None\n self.align_stack=[]\n self.font_stack=[]\n self.margin_stack=[]\n self.spacing=None\n self.style_stack=[]\n self.nospace=1\n self.softspace=0\n self.para_end=1\n self.parskip=0\n self.hard_break=1\n self.have_label=0\n \n def end_paragraph(self,blankline):\n if not self.hard_break:\n self.writer.send_line_break()\n self.have_label=0\n if self.parskip 0:\n label=label+self.format_letter(c,counter)\n elif c in'iI':\n if counter >0:\n label=label+self.format_roman(c,counter)\n else :\n label=label+c\n return label\n \n def format_letter(self,case,counter):\n label=''\n while counter >0:\n counter,x=divmod(counter -1,26)\n \n \n \n s=chr(ord(case)+x)\n label=s+label\n return label\n \n def format_roman(self,case,counter):\n ones=['i','x','c','m']\n fives=['v','l','d']\n label,index='',0\n \n while counter >0:\n counter,x=divmod(counter,10)\n if x ==9:\n label=ones[index]+ones[index+1]+label\n elif x ==4:\n label=ones[index]+fives[index]+label\n else :\n if x >=5:\n s=fives[index]\n x=x -5\n else :\n s=''\n s=s+ones[index]*x\n 
label=s+label\n index=index+1\n if case =='I':\n return label.upper()\n return label\n \n def add_flowing_data(self,data):\n if not data:return\n prespace=data[:1].isspace()\n postspace=data[-1:].isspace()\n data=\" \".join(data.split())\n if self.nospace and not data:\n return\n elif prespace or self.softspace:\n if not data:\n if not self.nospace:\n self.softspace=1\n self.parskip=0\n return\n if not self.nospace:\n data=' '+data\n self.hard_break=self.nospace=self.para_end= self.parskip=self.have_label=0\n self.softspace=postspace\n self.writer.send_flowing_data(data)\n \n def add_literal_data(self,data):\n if not data:return\n if self.softspace:\n self.writer.send_flowing_data(\" \")\n self.hard_break=data[-1:]=='\\n'\n self.nospace=self.para_end=self.softspace= self.parskip=self.have_label=0\n self.writer.send_literal_data(data)\n \n def flush_softspace(self):\n if self.softspace:\n self.hard_break=self.para_end=self.parskip= self.have_label=self.softspace=0\n self.nospace=1\n self.writer.send_flowing_data(' ')\n \n def push_alignment(self,align):\n if align and align !=self.align:\n self.writer.new_alignment(align)\n self.align=align\n self.align_stack.append(align)\n else :\n self.align_stack.append(self.align)\n \n def pop_alignment(self):\n if self.align_stack:\n del self.align_stack[-1]\n if self.align_stack:\n self.align=align=self.align_stack[-1]\n self.writer.new_alignment(align)\n else :\n self.align=None\n self.writer.new_alignment(None )\n \n def push_font(self,font):\n size,i,b,tt=font\n if self.softspace:\n self.hard_break=self.para_end=self.softspace=0\n self.nospace=1\n self.writer.send_flowing_data(' ')\n if self.font_stack:\n csize,ci,cb,ctt=self.font_stack[-1]\n if size is AS_IS:size=csize\n if i is AS_IS:i=ci\n if b is AS_IS:b=cb\n if tt is AS_IS:tt=ctt\n font=(size,i,b,tt)\n self.font_stack.append(font)\n self.writer.new_font(font)\n \n def pop_font(self):\n if self.font_stack:\n del self.font_stack[-1]\n if self.font_stack:\n font=self.font_stack[-1]\n else :\n font=None\n self.writer.new_font(font)\n \n def push_margin(self,margin):\n self.margin_stack.append(margin)\n fstack=[m for m in self.margin_stack if m]\n if not margin and fstack:\n margin=fstack[-1]\n self.writer.new_margin(margin,len(fstack))\n \n def pop_margin(self):\n if self.margin_stack:\n del self.margin_stack[-1]\n fstack=[m for m in self.margin_stack if m]\n if fstack:\n margin=fstack[-1]\n else :\n margin=None\n self.writer.new_margin(margin,len(fstack))\n \n def set_spacing(self,spacing):\n self.spacing=spacing\n self.writer.new_spacing(spacing)\n \n def push_style(self,*styles):\n if self.softspace:\n self.hard_break=self.para_end=self.softspace=0\n self.nospace=1\n self.writer.send_flowing_data(' ')\n for style in styles:\n self.style_stack.append(style)\n self.writer.new_styles(tuple(self.style_stack))\n \n def pop_style(self,n=1):\n del self.style_stack[-n:]\n self.writer.new_styles(tuple(self.style_stack))\n \n def assert_line_data(self,flag=1):\n self.nospace=self.hard_break=not flag\n self.para_end=self.parskip=self.have_label=0\n \n \nclass NullWriter:\n ''\n\n\n\n\n\n \n def __init__(self):pass\n def flush(self):pass\n def new_alignment(self,align):pass\n def new_font(self,font):pass\n def new_margin(self,margin,level):pass\n def new_spacing(self,spacing):pass\n def new_styles(self,styles):pass\n def send_paragraph(self,blankline):pass\n def send_line_break(self):pass\n def send_hor_rule(self,*args,**kw):pass\n def send_label_data(self,data):pass\n def 
send_flowing_data(self,data):pass\n def send_literal_data(self,data):pass\n \n \nclass AbstractWriter(NullWriter):\n ''\n\n\n\n\n \n \n def new_alignment(self,align):\n print(\"new_alignment(%r)\"%(align,))\n \n def new_font(self,font):\n print(\"new_font(%r)\"%(font,))\n \n def new_margin(self,margin,level):\n print(\"new_margin(%r, %d)\"%(margin,level))\n \n def new_spacing(self,spacing):\n print(\"new_spacing(%r)\"%(spacing,))\n \n def new_styles(self,styles):\n print(\"new_styles(%r)\"%(styles,))\n \n def send_paragraph(self,blankline):\n print(\"send_paragraph(%r)\"%(blankline,))\n \n def send_line_break(self):\n print(\"send_line_break()\")\n \n def send_hor_rule(self,*args,**kw):\n print(\"send_hor_rule()\")\n \n def send_label_data(self,data):\n print(\"send_label_data(%r)\"%(data,))\n \n def send_flowing_data(self,data):\n print(\"send_flowing_data(%r)\"%(data,))\n \n def send_literal_data(self,data):\n print(\"send_literal_data(%r)\"%(data,))\n \n \nclass DumbWriter(NullWriter):\n ''\n\n\n\n\n\n \n \n def __init__(self,file=None ,maxcol=72):\n self.file=file or sys.stdout\n self.maxcol=maxcol\n NullWriter.__init__(self)\n self.reset()\n \n def reset(self):\n self.col=0\n self.atbreak=0\n \n def send_paragraph(self,blankline):\n self.file.write('\\n'*blankline)\n self.col=0\n self.atbreak=0\n \n def send_line_break(self):\n self.file.write('\\n')\n self.col=0\n self.atbreak=0\n \n def send_hor_rule(self,*args,**kw):\n self.file.write('\\n')\n self.file.write('-'*self.maxcol)\n self.file.write('\\n')\n self.col=0\n self.atbreak=0\n \n def send_literal_data(self,data):\n self.file.write(data)\n i=data.rfind('\\n')\n if i >=0:\n self.col=0\n data=data[i+1:]\n data=data.expandtabs()\n self.col=self.col+len(data)\n self.atbreak=0\n \n def send_flowing_data(self,data):\n if not data:return\n atbreak=self.atbreak or data[0].isspace()\n col=self.col\n maxcol=self.maxcol\n write=self.file.write\n for word in data.split():\n if atbreak:\n if col+len(word)>=maxcol:\n write('\\n')\n col=0\n else :\n write(' ')\n col=col+1\n write(word)\n col=col+len(word)\n atbreak=1\n self.col=col\n self.atbreak=data[-1].isspace()\n \n \ndef test(file=None ):\n w=DumbWriter()\n f=AbstractFormatter(w)\n if file is not None :\n fp=open(file)\n elif sys.argv[1:]:\n fp=open(sys.argv[1])\n else :\n fp=sys.stdin\n for line in fp:\n if line =='\\n':\n f.end_paragraph(1)\n else :\n f.add_flowing_data(line)\n f.end_paragraph(0)\n \n \nif __name__ =='__main__':\n test()\n"], "importlib.util": [".py", "''\n\nfrom ._bootstrap import module_for_loader\nfrom ._bootstrap import set_loader\nfrom ._bootstrap import set_package\nfrom ._bootstrap import _resolve_name\n\n\ndef resolve_name(name,package):\n ''\n if not name.startswith('.'):\n return name\n elif not package:\n raise ValueError('{!r} is not a relative name '\n '(no leading dot)'.format(name))\n level=0\n for character in name:\n if character !='.':\n break\n level +=1\n return _resolve_name(name[level:],package,level)\n"], "weakref": [".py", "''\n\n\n\n\n\n\n\n\n\n\nfrom _weakref import (\ngetweakrefcount,\ngetweakrefs,\nref,\nproxy,\nCallableProxyType,\nProxyType,\nReferenceType)\n\nfrom _weakrefset import WeakSet,_IterationGuard\n\nimport collections\n\nProxyTypes=(ProxyType,CallableProxyType)\n\n__all__=[\"ref\",\"proxy\",\"getweakrefcount\",\"getweakrefs\",\n\"WeakKeyDictionary\",\"ReferenceType\",\"ProxyType\",\n\"CallableProxyType\",\"ProxyTypes\",\"WeakValueDictionary\",\n\"WeakSet\"]\n\n\nclass WeakValueDictionary(collections.MutableMapping):\n ''\n\n\n\n 
\n \n \n \n \n \n \n def __init__(self,*args,**kw):\n def remove(wr,selfref=ref(self)):\n self=selfref()\n if self is not None :\n if self._iterating:\n self._pending_removals.append(wr.key)\n else :\n del self.data[wr.key]\n self._remove=remove\n \n self._pending_removals=[]\n self._iterating=set()\n self.data=d={}\n self.update(*args,**kw)\n \n def _commit_removals(self):\n l=self._pending_removals\n d=self.data\n \n \n while l:\n del d[l.pop()]\n \n def __getitem__(self,key):\n o=self.data[key]()\n if o is None :\n raise KeyError(key)\n else :\n return o\n \n def __delitem__(self,key):\n if self._pending_removals:\n self._commit_removals()\n del self.data[key]\n \n def __len__(self):\n return len(self.data)-len(self._pending_removals)\n \n def __contains__(self,key):\n try :\n o=self.data[key]()\n except KeyError:\n return False\n return o is not None\n \n def __repr__(self):\n return\"\"%id(self)\n \n def __setitem__(self,key,value):\n if self._pending_removals:\n self._commit_removals()\n self.data[key]=KeyedRef(value,self._remove,key)\n \n def copy(self):\n new=WeakValueDictionary()\n for key,wr in self.data.items():\n o=wr()\n if o is not None :\n new[key]=o\n return new\n \n __copy__=copy\n \n def __deepcopy__(self,memo):\n from copy import deepcopy\n new=self.__class__()\n for key,wr in self.data.items():\n o=wr()\n if o is not None :\n new[deepcopy(key,memo)]=o\n return new\n \n def get(self,key,default=None ):\n try :\n wr=self.data[key]\n except KeyError:\n return default\n else :\n o=wr()\n if o is None :\n \n return default\n else :\n return o\n \n def items(self):\n with _IterationGuard(self):\n for k,wr in self.data.items():\n v=wr()\n if v is not None :\n yield k,v\n \n def keys(self):\n with _IterationGuard(self):\n for k,wr in self.data.items():\n if wr()is not None :\n yield k\n \n __iter__=keys\n \n def itervaluerefs(self):\n ''\n\n\n\n\n\n\n\n \n with _IterationGuard(self):\n for wr in self.data.values():\n yield wr\n \n def values(self):\n with _IterationGuard(self):\n for wr in self.data.values():\n obj=wr()\n if obj is not None :\n yield obj\n \n def popitem(self):\n if self._pending_removals:\n self._commit_removals()\n while True :\n key,wr=self.data.popitem()\n o=wr()\n if o is not None :\n return key,o\n \n def pop(self,key,*args):\n if self._pending_removals:\n self._commit_removals()\n try :\n o=self.data.pop(key)()\n except KeyError:\n if args:\n return args[0]\n raise\n if o is None :\n raise KeyError(key)\n else :\n return o\n \n def setdefault(self,key,default=None ):\n try :\n wr=self.data[key]\n except KeyError:\n if self._pending_removals:\n self._commit_removals()\n self.data[key]=KeyedRef(default,self._remove,key)\n return default\n else :\n return wr()\n \n def update(self,dict=None ,**kwargs):\n if self._pending_removals:\n self._commit_removals()\n d=self.data\n if dict is not None :\n if not hasattr(dict,\"items\"):\n dict=type({})(dict)\n for key,o in dict.items():\n d[key]=KeyedRef(o,self._remove,key)\n if len(kwargs):\n self.update(kwargs)\n \n def valuerefs(self):\n ''\n\n\n\n\n\n\n\n \n return list(self.data.values())\n \n \nclass KeyedRef(ref):\n ''\n\n\n\n\n\n\n \n \n __slots__=\"key\",\n \n def __new__(type,ob,callback,key):\n self=ref.__new__(type,ob,callback)\n self.key=key\n return self\n \n def __init__(self,ob,callback,key):\n super().__init__(ob,callback)\n \n \nclass WeakKeyDictionary(collections.MutableMapping):\n ''\n\n\n\n\n\n\n\n \n \n def __init__(self,dict=None ):\n self.data={}\n def remove(k,selfref=ref(self)):\n 
self=selfref()\n if self is not None :\n if self._iterating:\n self._pending_removals.append(k)\n else :\n del self.data[k]\n self._remove=remove\n \n self._pending_removals=[]\n self._iterating=set()\n if dict is not None :\n self.update(dict)\n \n def _commit_removals(self):\n \n \n \n \n l=self._pending_removals\n d=self.data\n while l:\n try :\n del d[l.pop()]\n except KeyError:\n pass\n \n def __delitem__(self,key):\n del self.data[ref(key)]\n \n def __getitem__(self,key):\n return self.data[ref(key)]\n \n def __len__(self):\n return len(self.data)-len(self._pending_removals)\n \n def __repr__(self):\n return\"\"%id(self)\n \n def __setitem__(self,key,value):\n self.data[ref(key,self._remove)]=value\n \n def copy(self):\n new=WeakKeyDictionary()\n for key,value in self.data.items():\n o=key()\n if o is not None :\n new[o]=value\n return new\n \n __copy__=copy\n \n def __deepcopy__(self,memo):\n from copy import deepcopy\n new=self.__class__()\n for key,value in self.data.items():\n o=key()\n if o is not None :\n new[o]=deepcopy(value,memo)\n return new\n \n def get(self,key,default=None ):\n return self.data.get(ref(key),default)\n \n def __contains__(self,key):\n try :\n wr=ref(key)\n except TypeError:\n return False\n return wr in self.data\n \n def items(self):\n with _IterationGuard(self):\n for wr,value in self.data.items():\n key=wr()\n if key is not None :\n yield key,value\n \n def keys(self):\n with _IterationGuard(self):\n for wr in self.data:\n obj=wr()\n if obj is not None :\n yield obj\n \n __iter__=keys\n \n def values(self):\n with _IterationGuard(self):\n for wr,value in self.data.items():\n if wr()is not None :\n yield value\n \n def keyrefs(self):\n ''\n\n\n\n\n\n\n\n \n return list(self.data)\n \n def popitem(self):\n while True :\n key,value=self.data.popitem()\n o=key()\n if o is not None :\n return o,value\n \n def pop(self,key,*args):\n return self.data.pop(ref(key),*args)\n \n def setdefault(self,key,default=None ):\n return self.data.setdefault(ref(key,self._remove),default)\n \n def update(self,dict=None ,**kwargs):\n d=self.data\n if dict is not None :\n if not hasattr(dict,\"items\"):\n dict=type({})(dict)\n for key,value in dict.items():\n d[ref(key,self._remove)]=value\n if len(kwargs):\n self.update(kwargs)\n"], "browser.svg": [".py", "from _svg import *"], "asyncio.brython_events": [".py", "''\n\nimport time\n\nfrom browser import timer,websocket\n\n\nfrom .import base_events\nfrom .import transports\nfrom .import events\n\n\n__all__=['BrythonEventLoop','DefaultEventLoopPolicy','WebSocketTransport']\n\n\nclass BrythonEventLoop(base_events.BaseEventLoop):\n ''\n\n \n \n class Handle:\n def __init__(self,tm):\n self.tm=tm\n \n def cancel(self):\n timer.clear_timeout(self.tm)\n \n def __init__(self):\n super().__init__()\n \n def call_soon(self,callback,*args):\n return BrythonEventLoop.Handle(timer.set_timeout(lambda :callback(*args),1))\n \n def call_soon_threadsafe(self,callback,*args):\n return self.call_soon(callback,*args)\n \n def call_later(self,delay,callback,*args):\n return BrythonEventLoop.Handle(timer.set_timeout(lambda :callback(*args),delay *1000))\n \n def call_at(self,when,callback,*args):\n now=self.time()\n if when <=now:\n return self.call_soon(callback,*args)\n else :\n return self.call_later(when -now,callback,*args)\n pass\n \n def time(self):\n return time.time()\n \n def is_running(self):\n return True\n \n def run_forever(self):\n ''\n self._check_closed()\n while True :\n try :\n self._run_once()\n except 
base_events._StopError:\n break\n \n def run_in_executor(self,executor,callback,args):\n raise NotImplementedError()\n \n def set_default_executor(self,executor):\n raise NotImplementedError()\n \n def getaddrinfo(self,host,port,family,type,proto,flags):\n raise NotImplementedError()\n \n def getnameinfo(self,sockaddr,flags):\n raise NotImplementedError()\n \n def create_connection(self,protocol_factory,host,port,path,method,**kwargs):\n pass\n \n def create_datagram_endpoint(self,protocol_factory,remote_addr):\n transport=SocketTransport(remote_addr)\n protocol=protocol_factory()\n protocol.connection_made(transport)\n \n \nclass _BrythonDefaultEventLoopPolicy(events.BaseDefaultEventLoopPolicy):\n ''\n _loop_factory=BrythonEventLoop\n \n \nclass WebSocketTransport(transports.ReadTransport,transports.WriteTransport):\n def __init__(self,remote_addr,protocol,extra=None ):\n self._proto=protocol\n self._web_sock=websocket.WebSocket(remote_addr)\n self._web_sock.bind('close',lambda evt:protocol.connection_lost())\n self._web_sock.bind('open',lambda evt:protocol.connection_made(self))\n self._web_sock.bind('message',lambda evt:protocol.datagram_received(evt.data))\n \n def write(self,data):\n self._web_sock.send(data)\n \n def can_write_eof(self):\n return True\n \n def write_eof(self):\n self._web_sock.close()\n \n def pause_reading(self):\n raise NotImplementedError()\n \n def resume_reading(self):\n raise NotImplementedError()\n \nDefaultEventLoopPolicy=_BrythonDefaultEventLoopPolicy\n"], "encodings.iso8859_15": [".py", "''\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_table)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False ):\n return codecs.charmap_encode(input,self.errors,encoding_table)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False ):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='iso8859-15',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n \n \ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' 
'\n'!'\n'\"'\n'#'\n'$'\n'%'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\x80'\n'\\x81'\n'\\x82'\n'\\x83'\n'\\x84'\n'\\x85'\n'\\x86'\n'\\x87'\n'\\x88'\n'\\x89'\n'\\x8a'\n'\\x8b'\n'\\x8c'\n'\\x8d'\n'\\x8e'\n'\\x8f'\n'\\x90'\n'\\x91'\n'\\x92'\n'\\x93'\n'\\x94'\n'\\x95'\n'\\x96'\n'\\x97'\n'\\x98'\n'\\x99'\n'\\x9a'\n'\\x9b'\n'\\x9c'\n'\\x9d'\n'\\x9e'\n'\\x9f'\n'\\xa0'\n'\\xa1'\n'\\xa2'\n'\\xa3'\n'\\u20ac'\n'\\xa5'\n'\\u0160'\n'\\xa7'\n'\\u0161'\n'\\xa9'\n'\\xaa'\n'\\xab'\n'\\xac'\n'\\xad'\n'\\xae'\n'\\xaf'\n'\\xb0'\n'\\xb1'\n'\\xb2'\n'\\xb3'\n'\\u017d'\n'\\xb5'\n'\\xb6'\n'\\xb7'\n'\\u017e'\n'\\xb9'\n'\\xba'\n'\\xbb'\n'\\u0152'\n'\\u0153'\n'\\u0178'\n'\\xbf'\n'\\xc0'\n'\\xc1'\n'\\xc2'\n'\\xc3'\n'\\xc4'\n'\\xc5'\n'\\xc6'\n'\\xc7'\n'\\xc8'\n'\\xc9'\n'\\xca'\n'\\xcb'\n'\\xcc'\n'\\xcd'\n'\\xce'\n'\\xcf'\n'\\xd0'\n'\\xd1'\n'\\xd2'\n'\\xd3'\n'\\xd4'\n'\\xd5'\n'\\xd6'\n'\\xd7'\n'\\xd8'\n'\\xd9'\n'\\xda'\n'\\xdb'\n'\\xdc'\n'\\xdd'\n'\\xde'\n'\\xdf'\n'\\xe0'\n'\\xe1'\n'\\xe2'\n'\\xe3'\n'\\xe4'\n'\\xe5'\n'\\xe6'\n'\\xe7'\n'\\xe8'\n'\\xe9'\n'\\xea'\n'\\xeb'\n'\\xec'\n'\\xed'\n'\\xee'\n'\\xef'\n'\\xf0'\n'\\xf1'\n'\\xf2'\n'\\xf3'\n'\\xf4'\n'\\xf5'\n'\\xf6'\n'\\xf7'\n'\\xf8'\n'\\xf9'\n'\\xfa'\n'\\xfb'\n'\\xfc'\n'\\xfd'\n'\\xfe'\n'\\xff'\n)\n\n\nencoding_table=codecs.charmap_build(decoding_table)\n"], "encodings.iso8859_13": [".py", "''\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_table)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False ):\n return codecs.charmap_encode(input,self.errors,encoding_table)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False ):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='iso8859-13',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n \n \ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' 
'\n'!'\n'\"'\n'#'\n'$'\n'%'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\x80'\n'\\x81'\n'\\x82'\n'\\x83'\n'\\x84'\n'\\x85'\n'\\x86'\n'\\x87'\n'\\x88'\n'\\x89'\n'\\x8a'\n'\\x8b'\n'\\x8c'\n'\\x8d'\n'\\x8e'\n'\\x8f'\n'\\x90'\n'\\x91'\n'\\x92'\n'\\x93'\n'\\x94'\n'\\x95'\n'\\x96'\n'\\x97'\n'\\x98'\n'\\x99'\n'\\x9a'\n'\\x9b'\n'\\x9c'\n'\\x9d'\n'\\x9e'\n'\\x9f'\n'\\xa0'\n'\\u201d'\n'\\xa2'\n'\\xa3'\n'\\xa4'\n'\\u201e'\n'\\xa6'\n'\\xa7'\n'\\xd8'\n'\\xa9'\n'\\u0156'\n'\\xab'\n'\\xac'\n'\\xad'\n'\\xae'\n'\\xc6'\n'\\xb0'\n'\\xb1'\n'\\xb2'\n'\\xb3'\n'\\u201c'\n'\\xb5'\n'\\xb6'\n'\\xb7'\n'\\xf8'\n'\\xb9'\n'\\u0157'\n'\\xbb'\n'\\xbc'\n'\\xbd'\n'\\xbe'\n'\\xe6'\n'\\u0104'\n'\\u012e'\n'\\u0100'\n'\\u0106'\n'\\xc4'\n'\\xc5'\n'\\u0118'\n'\\u0112'\n'\\u010c'\n'\\xc9'\n'\\u0179'\n'\\u0116'\n'\\u0122'\n'\\u0136'\n'\\u012a'\n'\\u013b'\n'\\u0160'\n'\\u0143'\n'\\u0145'\n'\\xd3'\n'\\u014c'\n'\\xd5'\n'\\xd6'\n'\\xd7'\n'\\u0172'\n'\\u0141'\n'\\u015a'\n'\\u016a'\n'\\xdc'\n'\\u017b'\n'\\u017d'\n'\\xdf'\n'\\u0105'\n'\\u012f'\n'\\u0101'\n'\\u0107'\n'\\xe4'\n'\\xe5'\n'\\u0119'\n'\\u0113'\n'\\u010d'\n'\\xe9'\n'\\u017a'\n'\\u0117'\n'\\u0123'\n'\\u0137'\n'\\u012b'\n'\\u013c'\n'\\u0161'\n'\\u0144'\n'\\u0146'\n'\\xf3'\n'\\u014d'\n'\\xf5'\n'\\xf6'\n'\\xf7'\n'\\u0173'\n'\\u0142'\n'\\u015b'\n'\\u016b'\n'\\xfc'\n'\\u017c'\n'\\u017e'\n'\\u2019'\n)\n\n\nencoding_table=codecs.charmap_build(decoding_table)\n"], "encodings.mac_greek": [".py", "''\n\n\n\nimport codecs\n\n\n\nclass Codec(codecs.Codec):\n\n def encode(self,input,errors='strict'):\n return codecs.charmap_encode(input,errors,encoding_table)\n \n def decode(self,input,errors='strict'):\n return codecs.charmap_decode(input,errors,decoding_table)\n \nclass IncrementalEncoder(codecs.IncrementalEncoder):\n def encode(self,input,final=False ):\n return codecs.charmap_encode(input,self.errors,encoding_table)[0]\n \nclass IncrementalDecoder(codecs.IncrementalDecoder):\n def decode(self,input,final=False ):\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\n \nclass StreamWriter(Codec,codecs.StreamWriter):\n pass\n \nclass StreamReader(Codec,codecs.StreamReader):\n pass\n \n \n \ndef getregentry():\n return codecs.CodecInfo(\n name='mac-greek',\n encode=Codec().encode,\n decode=Codec().decode,\n incrementalencoder=IncrementalEncoder,\n incrementaldecoder=IncrementalDecoder,\n streamreader=StreamReader,\n streamwriter=StreamWriter,\n )\n \n \n \n \ndecoding_table=(\n'\\x00'\n'\\x01'\n'\\x02'\n'\\x03'\n'\\x04'\n'\\x05'\n'\\x06'\n'\\x07'\n'\\x08'\n'\\t'\n'\\n'\n'\\x0b'\n'\\x0c'\n'\\r'\n'\\x0e'\n'\\x0f'\n'\\x10'\n'\\x11'\n'\\x12'\n'\\x13'\n'\\x14'\n'\\x15'\n'\\x16'\n'\\x17'\n'\\x18'\n'\\x19'\n'\\x1a'\n'\\x1b'\n'\\x1c'\n'\\x1d'\n'\\x1e'\n'\\x1f'\n' 
'\n'!'\n'\"'\n'#'\n'$'\n'%'\n'&'\n\"'\"\n'('\n')'\n'*'\n'+'\n','\n'-'\n'.'\n'/'\n'0'\n'1'\n'2'\n'3'\n'4'\n'5'\n'6'\n'7'\n'8'\n'9'\n':'\n';'\n'<'\n'='\n'>'\n'?'\n'@'\n'A'\n'B'\n'C'\n'D'\n'E'\n'F'\n'G'\n'H'\n'I'\n'J'\n'K'\n'L'\n'M'\n'N'\n'O'\n'P'\n'Q'\n'R'\n'S'\n'T'\n'U'\n'V'\n'W'\n'X'\n'Y'\n'Z'\n'['\n'\\\\'\n']'\n'^'\n'_'\n'`'\n'a'\n'b'\n'c'\n'd'\n'e'\n'f'\n'g'\n'h'\n'i'\n'j'\n'k'\n'l'\n'm'\n'n'\n'o'\n'p'\n'q'\n'r'\n's'\n't'\n'u'\n'v'\n'w'\n'x'\n'y'\n'z'\n'{'\n'|'\n'}'\n'~'\n'\\x7f'\n'\\xc4'\n'\\xb9'\n'\\xb2'\n'\\xc9'\n'\\xb3'\n'\\xd6'\n'\\xdc'\n'\\u0385'\n'\\xe0'\n'\\xe2'\n'\\xe4'\n'\\u0384'\n'\\xa8'\n'\\xe7'\n'\\xe9'\n'\\xe8'\n'\\xea'\n'\\xeb'\n'\\xa3'\n'\\u2122'\n'\\xee'\n'\\xef'\n'\\u2022'\n'\\xbd'\n'\\u2030'\n'\\xf4'\n'\\xf6'\n'\\xa6'\n'\\u20ac'\n'\\xf9'\n'\\xfb'\n'\\xfc'\n'\\u2020'\n'\\u0393'\n'\\u0394'\n'\\u0398'\n'\\u039b'\n'\\u039e'\n'\\u03a0'\n'\\xdf'\n'\\xae'\n'\\xa9'\n'\\u03a3'\n'\\u03aa'\n'\\xa7'\n'\\u2260'\n'\\xb0'\n'\\xb7'\n'\\u0391'\n'\\xb1'\n'\\u2264'\n'\\u2265'\n'\\xa5'\n'\\u0392'\n'\\u0395'\n'\\u0396'\n'\\u0397'\n'\\u0399'\n'\\u039a'\n'\\u039c'\n'\\u03a6'\n'\\u03ab'\n'\\u03a8'\n'\\u03a9'\n'\\u03ac'\n'\\u039d'\n'\\xac'\n'\\u039f'\n'\\u03a1'\n'\\u2248'\n'\\u03a4'\n'\\xab'\n'\\xbb'\n'\\u2026'\n'\\xa0'\n'\\u03a5'\n'\\u03a7'\n'\\u0386'\n'\\u0388'\n'\\u0153'\n'\\u2013'\n'\\u2015'\n'\\u201c'\n'\\u201d'\n'\\u2018'\n'\\u2019'\n'\\xf7'\n'\\u0389'\n'\\u038a'\n'\\u038c'\n'\\u038e'\n'\\u03ad'\n'\\u03ae'\n'\\u03af'\n'\\u03cc'\n'\\u038f'\n'\\u03cd'\n'\\u03b1'\n'\\u03b2'\n'\\u03c8'\n'\\u03b4'\n'\\u03b5'\n'\\u03c6'\n'\\u03b3'\n'\\u03b7'\n'\\u03b9'\n'\\u03be'\n'\\u03ba'\n'\\u03bb'\n'\\u03bc'\n'\\u03bd'\n'\\u03bf'\n'\\u03c0'\n'\\u03ce'\n'\\u03c1'\n'\\u03c3'\n'\\u03c4'\n'\\u03b8'\n'\\u03c9'\n'\\u03c2'\n'\\u03c7'\n'\\u03c5'\n'\\u03b6'\n'\\u03ca'\n'\\u03cb'\n'\\u0390'\n'\\u03b0'\n'\\xad'\n)\n\n\nencoding_table=codecs.charmap_build(decoding_table)\n"], "urllib.request": [".py", "from browser import ajax\nfrom .import error\n\nclass FileIO:\n def __init__(self,data):\n self._data=data\n \n def read(self):\n return self._data\n \ndef urlopen(url,data=None ,timeout=None ):\n global result\n result=None\n \n def on_complete(req):\n global result\n if req.status ==200:\n result=req\n \n _ajax=ajax.ajax()\n _ajax.bind('complete',on_complete)\n if timeout is not None :\n _ajax.set_timeout(timeout)\n \n if data is None :\n _ajax.open('GET',url,False )\n _ajax.send()\n else :\n _ajax.open('POST',url,False )\n _ajax.send(data)\n \n if result is not None :\n if isinstance(result.text,str):\n return FileIO(result.text)\n \n return FileIO(result.text())\n raise error.HTTPError('file not found')"], "xml.etree.ElementTree": [".py", "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n__all__=[\n\n\"Comment\",\n\"dump\",\n\"Element\",\"ElementTree\",\n\"fromstring\",\"fromstringlist\",\n\"iselement\",\"iterparse\",\n\"parse\",\"ParseError\",\n\"PI\",\"ProcessingInstruction\",\n\"QName\",\n\"SubElement\",\n\"tostring\",\"tostringlist\",\n\"TreeBuilder\",\n\"VERSION\",\n\"XML\",\"XMLID\",\n\"XMLParser\",\"XMLTreeBuilder\",\n\"register_namespace\",\n]\n\nVERSION=\"1.3.0\"\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\nimport sys\nimport re\nimport warnings\nimport io\nimport contextlib\n\nfrom .import ElementPath\n\n\n\n\n\n\n\n\n\nclass ParseError(SyntaxError):\n pass\n \n \n \n \n \n \n \n \n \n \ndef iselement(element):\n\n\n return hasattr(element,'tag')\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \nclass Element:\n\n\n\n\n\n tag=None\n \n \n 
\n \n \n \n \n \n \n attrib=None\n \n \n \n \n \n \n \n text=None\n \n \n \n \n \n \n \n tail=None\n \n \n \n def __init__(self,tag,attrib={},**extra):\n if not isinstance(attrib,dict):\n raise TypeError(\"attrib must be dict, not %s\"%(\n attrib.__class__.__name__,))\n attrib=attrib.copy()\n attrib.update(extra)\n self.tag=tag\n self.attrib=attrib\n self._children=[]\n \n def __repr__(self):\n return\"\"%(repr(self.tag),id(self))\n \n \n \n \n \n \n \n \n def makeelement(self,tag,attrib):\n return self.__class__(tag,attrib)\n \n \n \n \n \n \n \n def copy(self):\n elem=self.makeelement(self.tag,self.attrib)\n elem.text=self.text\n elem.tail=self.tail\n elem[:]=self\n return elem\n \n \n \n \n \n \n \n \n def __len__(self):\n return len(self._children)\n \n def __bool__(self):\n warnings.warn(\n \"The behavior of this method will change in future versions. \"\n \"Use specific 'len(elem)' or 'elem is not None' test instead.\",\n FutureWarning,stacklevel=2\n )\n return len(self._children)!=0\n \n \n \n \n \n \n \n \n def __getitem__(self,index):\n return self._children[index]\n \n \n \n \n \n \n \n \n def __setitem__(self,index,element):\n \n \n \n \n \n self._children[index]=element\n \n \n \n \n \n \n \n def __delitem__(self,index):\n del self._children[index]\n \n \n \n \n \n \n \n \n \n def append(self,element):\n self._assert_is_element(element)\n self._children.append(element)\n \n \n \n \n \n \n \n def extend(self,elements):\n for element in elements:\n self._assert_is_element(element)\n self._children.extend(elements)\n \n \n \n \n \n \n def insert(self,index,element):\n self._assert_is_element(element)\n self._children.insert(index,element)\n \n def _assert_is_element(self,e):\n \n \n if not isinstance(e,_Element):\n raise TypeError('expected an Element, not %s'%type(e).__name__)\n \n \n \n \n \n \n \n \n \n \n \n \n def remove(self,element):\n \n self._children.remove(element)\n \n \n \n \n \n \n \n \n def getchildren(self):\n warnings.warn(\n \"This method will be removed in future versions. \"\n \"Use 'list(elem)' or iteration over elem instead.\",\n DeprecationWarning,stacklevel=2\n )\n return self._children\n \n \n \n \n \n \n \n \n \n def find(self,path,namespaces=None ):\n return ElementPath.find(self,path,namespaces)\n \n \n \n \n \n \n \n \n \n \n \n \n \n def findtext(self,path,default=None ,namespaces=None ):\n return ElementPath.findtext(self,path,default,namespaces)\n \n \n \n \n \n \n \n \n \n \n def findall(self,path,namespaces=None ):\n return ElementPath.findall(self,path,namespaces)\n \n \n \n \n \n \n \n \n \n \n def iterfind(self,path,namespaces=None ):\n return ElementPath.iterfind(self,path,namespaces)\n \n \n \n \n \n \n def clear(self):\n self.attrib.clear()\n self._children=[]\n self.text=self.tail=None\n \n \n \n \n \n \n \n \n \n \n \n def get(self,key,default=None ):\n return self.attrib.get(key,default)\n \n \n \n \n \n \n \n \n def set(self,key,value):\n self.attrib[key]=value\n \n \n \n \n \n \n \n \n \n def keys(self):\n return self.attrib.keys()\n \n \n \n \n \n \n \n \n def items(self):\n return self.attrib.items()\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n def iter(self,tag=None ):\n if tag ==\"*\":\n tag=None\n if tag is None or self.tag ==tag:\n yield self\n for e in self._children:\n for e in e.iter(tag):\n yield e\n \n \n def getiterator(self,tag=None ):\n \n warnings.warn(\n \"This method will be removed in future versions. 
\"\n \"Use 'elem.iter()' or 'list(elem.iter())' instead.\",\n PendingDeprecationWarning,stacklevel=2\n )\n return list(self.iter(tag))\n \n \n \n \n \n \n \n \n \n def itertext(self):\n tag=self.tag\n if not isinstance(tag,str)and tag is not None :\n return\n if self.text:\n yield self.text\n for e in self:\n for s in e.itertext():\n yield s\n if e.tail:\n yield e.tail\n \n \n_Element=_ElementInterface=Element\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\ndef SubElement(parent,tag,attrib={},**extra):\n attrib=attrib.copy()\n attrib.update(extra)\n element=parent.makeelement(tag,attrib)\n parent.append(element)\n return element\n \n \n \n \n \n \n \n \n \n \n \n \n \ndef Comment(text=None ):\n element=Element(Comment)\n element.text=text\n return element\n \n \n \n \n \n \n \n \n \n \n \ndef ProcessingInstruction(target,text=None ):\n element=Element(ProcessingInstruction)\n element.text=target\n if text:\n element.text=element.text+\" \"+text\n return element\n \nPI=ProcessingInstruction\n\n\n\n\n\n\n\n\n\n\n\nclass QName:\n def __init__(self,text_or_uri,tag=None ):\n if tag:\n text_or_uri=\"{%s}%s\"%(text_or_uri,tag)\n self.text=text_or_uri\n def __str__(self):\n return self.text\n def __repr__(self):\n return''%(self.text,)\n def __hash__(self):\n return hash(self.text)\n def __le__(self,other):\n if isinstance(other,QName):\n return self.text <=other.text\n return self.text <=other\n def __lt__(self,other):\n if isinstance(other,QName):\n return self.text =other.text\n return self.text >=other\n def __gt__(self,other):\n if isinstance(other,QName):\n return self.text >other.text\n return self.text >other\n def __eq__(self,other):\n if isinstance(other,QName):\n return self.text ==other.text\n return self.text ==other\n def __ne__(self,other):\n if isinstance(other,QName):\n return self.text !=other.text\n return self.text !=other\n \n \n \n \n \n \n \n \n \n \n \n \nclass ElementTree:\n\n def __init__(self,element=None ,file=None ):\n \n self._root=element\n if file:\n self.parse(file)\n \n \n \n \n \n \n \n def getroot(self):\n return self._root\n \n \n \n \n \n \n \n \n def _setroot(self,element):\n \n self._root=element\n \n \n \n \n \n \n \n \n \n \n \n \n def parse(self,source,parser=None ):\n close_source=False\n if not hasattr(source,\"read\"):\n source=open(source,\"rb\")\n close_source=True\n try :\n if not parser:\n parser=XMLParser(target=TreeBuilder())\n while 1:\n data=source.read(65536)\n if not data:\n break\n parser.feed(data)\n self._root=parser.close()\n return self._root\n finally :\n if close_source:\n source.close()\n \n \n \n \n \n \n \n \n \n def iter(self,tag=None ):\n \n return self._root.iter(tag)\n \n \n def getiterator(self,tag=None ):\n \n warnings.warn(\n \"This method will be removed in future versions. \"\n \"Use 'tree.iter()' or 'list(tree.iter())' instead.\",\n PendingDeprecationWarning,stacklevel=2\n )\n return list(self.iter(tag))\n \n \n \n \n \n \n \n \n \n def find(self,path,namespaces=None ):\n \n if path[:1]==\"/\":\n path=\".\"+path\n warnings.warn(\n \"This search is broken in 1.3 and earlier, and will be \"\n \"fixed in a future version. If you rely on the current \"\n \"behaviour, change it to %r\"%path,\n FutureWarning,stacklevel=2\n )\n return self._root.find(path,namespaces)\n \n \n \n \n \n \n \n \n \n \n \n \n \n def findtext(self,path,default=None ,namespaces=None ):\n \n if path[:1]==\"/\":\n path=\".\"+path\n warnings.warn(\n \"This search is broken in 1.3 and earlier, and will be \"\n \"fixed in a future version. 
If you rely on the current \"\n \"behaviour, change it to %r\"%path,\n FutureWarning,stacklevel=2\n )\n return self._root.findtext(path,default,namespaces)\n \n \n \n \n \n \n \n \n \n \n def findall(self,path,namespaces=None ):\n \n if path[:1]==\"/\":\n path=\".\"+path\n warnings.warn(\n \"This search is broken in 1.3 and earlier, and will be \"\n \"fixed in a future version. If you rely on the current \"\n \"behaviour, change it to %r\"%path,\n FutureWarning,stacklevel=2\n )\n return self._root.findall(path,namespaces)\n \n \n \n \n \n \n \n \n \n \n \n def iterfind(self,path,namespaces=None ):\n \n if path[:1]==\"/\":\n path=\".\"+path\n warnings.warn(\n \"This search is broken in 1.3 and earlier, and will be \"\n \"fixed in a future version. If you rely on the current \"\n \"behaviour, change it to %r\"%path,\n FutureWarning,stacklevel=2\n )\n return self._root.iterfind(path,namespaces)\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n def write(self,file_or_filename,\n encoding=None ,\n xml_declaration=None ,\n default_namespace=None ,\n method=None ):\n if not method:\n method=\"xml\"\n elif method not in _serialize:\n raise ValueError(\"unknown method %r\"%method)\n if not encoding:\n if method ==\"c14n\":\n encoding=\"utf-8\"\n else :\n encoding=\"us-ascii\"\n else :\n encoding=encoding.lower()\n with _get_writer(file_or_filename,encoding)as write:\n if method ==\"xml\"and (xml_declaration or\n (xml_declaration is None and\n encoding not in (\"utf-8\",\"us-ascii\",\"unicode\"))):\n declared_encoding=encoding\n if encoding ==\"unicode\":\n \n import locale\n declared_encoding=locale.getpreferredencoding()\n write(\"\\n\"%(\n declared_encoding,))\n if method ==\"text\":\n _serialize_text(write,self._root)\n else :\n qnames,namespaces=_namespaces(self._root,default_namespace)\n serialize=_serialize[method]\n serialize(write,self._root,qnames,namespaces)\n \n def write_c14n(self,file):\n \n return self.write(file,method=\"c14n\")\n \n \n \n \n@contextlib.contextmanager\ndef _get_writer(file_or_filename,encoding):\n\n try :\n write=file_or_filename.write\n except AttributeError:\n \n if encoding ==\"unicode\":\n file=open(file_or_filename,\"w\")\n else :\n file=open(file_or_filename,\"w\",encoding=encoding,\n errors=\"xmlcharrefreplace\")\n with file:\n yield file.write\n else :\n \n \n if encoding ==\"unicode\":\n \n yield write\n else :\n \n with contextlib.ExitStack()as stack:\n if isinstance(file_or_filename,io.BufferedIOBase):\n file=file_or_filename\n elif isinstance(file_or_filename,io.RawIOBase):\n file=io.BufferedWriter(file_or_filename)\n \n \n stack.callback(file.detach)\n else :\n \n \n file=io.BufferedIOBase()\n file.writable=lambda :True\n file.write=write\n try :\n \n \n file.seekable=file_or_filename.seekable\n file.tell=file_or_filename.tell\n except AttributeError:\n pass\n file=io.TextIOWrapper(file,\n encoding=encoding,\n errors=\"xmlcharrefreplace\",\n newline=\"\\n\")\n \n \n stack.callback(file.detach)\n yield file.write\n \ndef _namespaces(elem,default_namespace=None ):\n\n\n\n qnames={None :None }\n \n \n namespaces={}\n if default_namespace:\n namespaces[default_namespace]=\"\"\n \n def add_qname(qname):\n \n try :\n if qname[:1]==\"{\":\n uri,tag=qname[1:].rsplit(\"}\",1)\n prefix=namespaces.get(uri)\n if prefix is None :\n prefix=_namespace_map.get(uri)\n if prefix is None :\n prefix=\"ns%d\"%len(namespaces)\n if prefix !=\"xml\":\n namespaces[uri]=prefix\n if prefix:\n qnames[qname]=\"%s:%s\"%(prefix,tag)\n else :\n qnames[qname]=tag\n else :\n if 
default_namespace:\n \n raise ValueError(\n \"cannot use non-qualified names with \"\n \"default_namespace option\"\n )\n qnames[qname]=qname\n except TypeError:\n _raise_serialization_error(qname)\n \n \n for elem in elem.iter():\n tag=elem.tag\n if isinstance(tag,QName):\n if tag.text not in qnames:\n add_qname(tag.text)\n elif isinstance(tag,str):\n if tag not in qnames:\n add_qname(tag)\n elif tag is not None and tag is not Comment and tag is not PI:\n _raise_serialization_error(tag)\n for key,value in elem.items():\n if isinstance(key,QName):\n key=key.text\n if key not in qnames:\n add_qname(key)\n if isinstance(value,QName)and value.text not in qnames:\n add_qname(value.text)\n text=elem.text\n if isinstance(text,QName)and text.text not in qnames:\n add_qname(text.text)\n return qnames,namespaces\n \ndef _serialize_xml(write,elem,qnames,namespaces):\n tag=elem.tag\n text=elem.text\n if tag is Comment:\n write(\"\"%text)\n elif tag is ProcessingInstruction:\n write(\"\"%text)\n else :\n tag=qnames[tag]\n if tag is None :\n if text:\n write(_escape_cdata(text))\n for e in elem:\n _serialize_xml(write,e,qnames,None )\n else :\n write(\"<\"+tag)\n items=list(elem.items())\n if items or namespaces:\n if namespaces:\n for v,k in sorted(namespaces.items(),\n key=lambda x:x[1]):\n if k:\n k=\":\"+k\n write(\" xmlns%s=\\\"%s\\\"\"%(\n k,\n _escape_attrib(v)\n ))\n for k,v in sorted(items):\n if isinstance(k,QName):\n k=k.text\n if isinstance(v,QName):\n v=qnames[v.text]\n else :\n v=_escape_attrib(v)\n write(\" %s=\\\"%s\\\"\"%(qnames[k],v))\n if text or len(elem):\n write(\">\")\n if text:\n write(_escape_cdata(text))\n for e in elem:\n _serialize_xml(write,e,qnames,None )\n write(\"\")\n else :\n write(\" />\")\n if elem.tail:\n write(_escape_cdata(elem.tail))\n \nHTML_EMPTY=(\"area\",\"base\",\"basefont\",\"br\",\"col\",\"frame\",\"hr\",\n\"img\",\"input\",\"isindex\",\"link\",\"meta\",\"param\")\n\ntry :\n HTML_EMPTY=set(HTML_EMPTY)\nexcept NameError:\n pass\n \ndef _serialize_html(write,elem,qnames,namespaces):\n tag=elem.tag\n text=elem.text\n if tag is Comment:\n write(\"\"%_escape_cdata(text))\n elif tag is ProcessingInstruction:\n write(\"\"%_escape_cdata(text))\n else :\n tag=qnames[tag]\n if tag is None :\n if text:\n write(_escape_cdata(text))\n for e in elem:\n _serialize_html(write,e,qnames,None )\n else :\n write(\"<\"+tag)\n items=list(elem.items())\n if items or namespaces:\n if namespaces:\n for v,k in sorted(namespaces.items(),\n key=lambda x:x[1]):\n if k:\n k=\":\"+k\n write(\" xmlns%s=\\\"%s\\\"\"%(\n k,\n _escape_attrib(v)\n ))\n for k,v in sorted(items):\n if isinstance(k,QName):\n k=k.text\n if isinstance(v,QName):\n v=qnames[v.text]\n else :\n v=_escape_attrib_html(v)\n \n write(\" %s=\\\"%s\\\"\"%(qnames[k],v))\n write(\">\")\n tag=tag.lower()\n if text:\n if tag ==\"script\"or tag ==\"style\":\n write(text)\n else :\n write(_escape_cdata(text))\n for e in elem:\n _serialize_html(write,e,qnames,None )\n if tag not in HTML_EMPTY:\n write(\"\")\n if elem.tail:\n write(_escape_cdata(elem.tail))\n \ndef _serialize_text(write,elem):\n for part in elem.itertext():\n write(part)\n if elem.tail:\n write(elem.tail)\n \n_serialize={\n\"xml\":_serialize_xml,\n\"html\":_serialize_html,\n\"text\":_serialize_text,\n\n\n}\n\n\n\n\n\n\n\n\n\n\n\n\ndef register_namespace(prefix,uri):\n if re.match(\"ns\\d+$\",prefix):\n raise ValueError(\"Prefix format reserved for internal use\")\n for k,v in list(_namespace_map.items()):\n if k ==uri or v ==prefix:\n del _namespace_map[k]\n 
_namespace_map[uri]=prefix\n \n_namespace_map={\n\n\"http://www.w3.org/XML/1998/namespace\":\"xml\",\n\"http://www.w3.org/1999/xhtml\":\"html\",\n\"http://www.w3.org/1999/02/22-rdf-syntax-ns#\":\"rdf\",\n\"http://schemas.xmlsoap.org/wsdl/\":\"wsdl\",\n\n\"http://www.w3.org/2001/XMLSchema\":\"xs\",\n\"http://www.w3.org/2001/XMLSchema-instance\":\"xsi\",\n\n\"http://purl.org/dc/elements/1.1/\":\"dc\",\n}\n\nregister_namespace._namespace_map=_namespace_map\n\ndef _raise_serialization_error(text):\n raise TypeError(\n \"cannot serialize %r (type %s)\"%(text,type(text).__name__)\n )\n \ndef _escape_cdata(text):\n\n try :\n \n \n \n if\"&\"in text:\n text=text.replace(\"&\",\"&\")\n if\"<\"in text:\n text=text.replace(\"<\",\"<\")\n if\">\"in text:\n text=text.replace(\">\",\">\")\n return text\n except (TypeError,AttributeError):\n _raise_serialization_error(text)\n \ndef _escape_attrib(text):\n\n try :\n if\"&\"in text:\n text=text.replace(\"&\",\"&\")\n if\"<\"in text:\n text=text.replace(\"<\",\"<\")\n if\">\"in text:\n text=text.replace(\">\",\">\")\n if\"\\\"\"in text:\n text=text.replace(\"\\\"\",\""\")\n if\"\\n\"in text:\n text=text.replace(\"\\n\",\" \")\n return text\n except (TypeError,AttributeError):\n _raise_serialization_error(text)\n \ndef _escape_attrib_html(text):\n\n try :\n if\"&\"in text:\n text=text.replace(\"&\",\"&\")\n if\">\"in text:\n text=text.replace(\">\",\">\")\n if\"\\\"\"in text:\n text=text.replace(\"\\\"\",\""\")\n return text\n except (TypeError,AttributeError):\n _raise_serialization_error(text)\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \ndef tostring(element,encoding=None ,method=None ):\n stream=io.StringIO()if encoding =='unicode'else io.BytesIO()\n ElementTree(element).write(stream,encoding,method=method)\n return stream.getvalue()\n \n \n \n \n \n \n \n \n \n \n \n \n \n \nclass _ListDataStream(io.BufferedIOBase):\n ''\n \n def __init__(self,lst):\n self.lst=lst\n \n def writable(self):\n return True\n \n def seekable(self):\n return True\n \n def write(self,b):\n self.lst.append(b)\n \n def tell(self):\n return len(self.lst)\n \ndef tostringlist(element,encoding=None ,method=None ):\n lst=[]\n stream=_ListDataStream(lst)\n ElementTree(element).write(stream,encoding,method=method)\n return lst\n \n \n \n \n \n \n \n \n \n \ndef dump(elem):\n\n if not isinstance(elem,ElementTree):\n elem=ElementTree(elem)\n elem.write(sys.stdout,encoding=\"unicode\")\n tail=elem.getroot().tail\n if not tail or tail[-1]!=\"\\n\":\n sys.stdout.write(\"\\n\")\n \n \n \n \n \n \n \n \n \n \n \n \ndef parse(source,parser=None ):\n tree=ElementTree()\n tree.parse(source,parser)\n return tree\n \n \n \n \n \n \n \n \n \n \n \n \ndef iterparse(source,events=None ,parser=None ):\n close_source=False\n if not hasattr(source,\"read\"):\n source=open(source,\"rb\")\n close_source=True\n if not parser:\n parser=XMLParser(target=TreeBuilder())\n return _IterParseIterator(source,events,parser,close_source)\n \nclass _IterParseIterator:\n\n def __init__(self,source,events,parser,close_source=False ):\n self._file=source\n self._close_file=close_source\n self._events=[]\n self._index=0\n self._error=None\n self.root=self._root=None\n self._parser=parser\n \n parser=self._parser._parser\n append=self._events.append\n if events is None :\n events=[\"end\"]\n for event in events:\n if event ==\"start\":\n try :\n parser.ordered_attributes=1\n parser.specified_attributes=1\n def handler(tag,attrib_in,event=event,append=append,\n start=self._parser._start_list):\n 
append((event,start(tag,attrib_in)))\n parser.StartElementHandler=handler\n except AttributeError:\n def handler(tag,attrib_in,event=event,append=append,\n start=self._parser._start):\n append((event,start(tag,attrib_in)))\n parser.StartElementHandler=handler\n elif event ==\"end\":\n def handler(tag,event=event,append=append,\n end=self._parser._end):\n append((event,end(tag)))\n parser.EndElementHandler=handler\n elif event ==\"start-ns\":\n def handler(prefix,uri,event=event,append=append):\n append((event,(prefix or\"\",uri or\"\")))\n parser.StartNamespaceDeclHandler=handler\n elif event ==\"end-ns\":\n def handler(prefix,event=event,append=append):\n append((event,None ))\n parser.EndNamespaceDeclHandler=handler\n else :\n raise ValueError(\"unknown event %r\"%event)\n \n def __next__(self):\n while 1:\n try :\n item=self._events[self._index]\n self._index +=1\n return item\n except IndexError:\n pass\n if self._error:\n e=self._error\n self._error=None\n raise e\n if self._parser is None :\n self.root=self._root\n if self._close_file:\n self._file.close()\n raise StopIteration\n \n del self._events[:]\n self._index=0\n data=self._file.read(16384)\n if data:\n try :\n self._parser.feed(data)\n except SyntaxError as exc:\n self._error=exc\n else :\n self._root=self._parser.close()\n self._parser=None\n \n def __iter__(self):\n return self\n \n \n \n \n \n \n \n \n \n \n \ndef XML(text,parser=None ):\n if not parser:\n parser=XMLParser(target=TreeBuilder())\n parser.feed(text)\n return parser.close()\n \n \n \n \n \n \n \n \n \n \n \ndef XMLID(text,parser=None ):\n if not parser:\n parser=XMLParser(target=TreeBuilder())\n parser.feed(text)\n tree=parser.close()\n ids={}\n for elem in tree.iter():\n id=elem.get(\"id\")\n if id:\n ids[id]=elem\n return tree,ids\n \n \n \n \n \n \n \n \n \nfromstring=XML\n\n\n\n\n\n\n\n\n\n\n\ndef fromstringlist(sequence,parser=None ):\n if not parser:\n parser=XMLParser(target=TreeBuilder())\n for text in sequence:\n parser.feed(text)\n return parser.close()\n \n \n \n \n \n \n \n \n \n \n \n \n \n \nclass TreeBuilder:\n\n def __init__(self,element_factory=None ):\n self._data=[]\n self._elem=[]\n self._last=None\n self._tail=None\n if element_factory is None :\n element_factory=Element\n self._factory=element_factory\n \n \n \n \n \n \n \n \n def close(self):\n assert len(self._elem)==0,\"missing end tags\"\n assert self._last is not None ,\"missing toplevel element\"\n return self._last\n \n def _flush(self):\n if self._data:\n if self._last is not None :\n text=\"\".join(self._data)\n if self._tail:\n assert self._last.tail is None ,\"internal error (tail)\"\n self._last.tail=text\n else :\n assert self._last.text is None ,\"internal error (text)\"\n self._last.text=text\n self._data=[]\n \n \n \n \n \n \n \n def data(self,data):\n self._data.append(data)\n \n \n \n \n \n \n \n \n \n def start(self,tag,attrs):\n self._flush()\n self._last=elem=self._factory(tag,attrs)\n if self._elem:\n self._elem[-1].append(elem)\n self._elem.append(elem)\n self._tail=0\n return elem\n \n \n \n \n \n \n \n \n def end(self,tag):\n self._flush()\n self._last=self._elem.pop()\n assert self._last.tag ==tag, \"end tag mismatch (expected %s, got %s)\"%(\n self._last.tag,tag)\n self._tail=1\n return self._last\n \n \n \n \n \n \n \n \n \n \n \n \n \n \nclass XMLParser:\n\n def __init__(self,html=0,target=None ,encoding=None ):\n try :\n from xml.parsers import expat\n except ImportError:\n try :\n import pyexpat as expat\n except ImportError:\n raise ImportError(\n \"No 
module named expat; use SimpleXMLTreeBuilder instead\"\n )\n parser=expat.ParserCreate(encoding,\"}\")\n if target is None :\n target=TreeBuilder()\n \n self.parser=self._parser=parser\n self.target=self._target=target\n self._error=expat.error\n self._names={}\n \n parser.DefaultHandlerExpand=self._default\n if hasattr(target,'start'):\n parser.StartElementHandler=self._start\n if hasattr(target,'end'):\n parser.EndElementHandler=self._end\n if hasattr(target,'data'):\n parser.CharacterDataHandler=target.data\n \n if hasattr(target,'comment'):\n parser.CommentHandler=target.comment\n if hasattr(target,'pi'):\n parser.ProcessingInstructionHandler=target.pi\n \n try :\n parser.buffer_text=1\n except AttributeError:\n pass\n \n try :\n parser.ordered_attributes=1\n parser.specified_attributes=1\n if hasattr(target,'start'):\n parser.StartElementHandler=self._start_list\n except AttributeError:\n pass\n self._doctype=None\n self.entity={}\n try :\n self.version=\"Expat %d.%d.%d\"%expat.version_info\n except AttributeError:\n pass\n \n def _raiseerror(self,value):\n err=ParseError(value)\n err.code=value.code\n err.position=value.lineno,value.offset\n raise err\n \n def _fixname(self,key):\n \n try :\n name=self._names[key]\n except KeyError:\n name=key\n if\"}\"in name:\n name=\"{\"+name\n self._names[key]=name\n return name\n \n def _start(self,tag,attrib_in):\n fixname=self._fixname\n tag=fixname(tag)\n attrib={}\n for key,value in attrib_in.items():\n attrib[fixname(key)]=value\n return self.target.start(tag,attrib)\n \n def _start_list(self,tag,attrib_in):\n fixname=self._fixname\n tag=fixname(tag)\n attrib={}\n if attrib_in:\n for i in range(0,len(attrib_in),2):\n attrib[fixname(attrib_in[i])]=attrib_in[i+1]\n return self.target.start(tag,attrib)\n \n def _end(self,tag):\n return self.target.end(self._fixname(tag))\n \n def _default(self,text):\n prefix=text[:1]\n if prefix ==\"&\":\n \n try :\n data_handler=self.target.data\n except AttributeError:\n return\n try :\n data_handler(self.entity[text[1:-1]])\n except KeyError:\n from xml.parsers import expat\n err=expat.error(\n \"undefined entity %s: line %d, column %d\"%\n (text,self.parser.ErrorLineNumber,\n self.parser.ErrorColumnNumber)\n )\n err.code=11\n err.lineno=self.parser.ErrorLineNumber\n err.offset=self.parser.ErrorColumnNumber\n raise err\n elif prefix ==\"<\"and text[:9]==\"\":\n self._doctype=None\n return\n text=text.strip()\n if not text:\n return\n self._doctype.append(text)\n n=len(self._doctype)\n if n >2:\n type=self._doctype[1]\n if type ==\"PUBLIC\"and n ==4:\n name,type,pubid,system=self._doctype\n if pubid:\n pubid=pubid[1:-1]\n elif type ==\"SYSTEM\"and n ==3:\n name,type,system=self._doctype\n pubid=None\n else :\n return\n if hasattr(self.target,\"doctype\"):\n self.target.doctype(name,pubid,system[1:-1])\n elif self.doctype !=self._XMLParser__doctype:\n \n self._XMLParser__doctype(name,pubid,system[1:-1])\n self.doctype(name,pubid,system[1:-1])\n self._doctype=None\n \n \n \n \n \n \n \n \n def doctype(self,name,pubid,system):\n ''\n warnings.warn(\n \"This method of XMLParser is deprecated. 
Define doctype() \"\n \"method on the TreeBuilder target.\",\n DeprecationWarning,\n )\n \n \n __doctype=doctype\n \n \n \n \n \n \n def feed(self,data):\n try :\n self.parser.Parse(data,0)\n except self._error as v:\n self._raiseerror(v)\n \n \n \n \n \n \n \n def close(self):\n try :\n self.parser.Parse(\"\",1)\n except self._error as v:\n self._raiseerror(v)\n try :\n close_handler=self.target.close\n except AttributeError:\n pass\n else :\n return close_handler()\n finally :\n \n del self.parser,self._parser\n del self.target,self._target\n \n \n \ntry :\n\n from _elementtree import *\nexcept ImportError:\n pass\nelse :\n\n\n class ElementTree(ElementTree):\n def parse(self,source,parser=None ):\n close_source=False\n if not hasattr(source,'read'):\n source=open(source,'rb')\n close_source=True\n try :\n if parser is not None :\n while True :\n data=source.read(65536)\n if not data:\n break\n parser.feed(data)\n self._root=parser.close()\n else :\n parser=XMLParser()\n self._root=parser._parse(source)\n return self._root\n finally :\n if close_source:\n source.close()\n \n class iterparse:\n ''\n\n\n\n\n\n\n\n\n\n \n \n root=None\n def __init__(self,file,events=None ,parser=None ):\n self._close_file=False\n if not hasattr(file,'read'):\n file=open(file,'rb')\n self._close_file=True\n self._file=file\n self._events=[]\n self._index=0\n self._error=None\n self.root=self._root=None\n if parser is None :\n parser=XMLParser(target=TreeBuilder())\n self._parser=parser\n self._parser._setevents(self._events,events)\n \n def __next__(self):\n while True :\n try :\n item=self._events[self._index]\n self._index +=1\n return item\n except IndexError:\n pass\n if self._error:\n e=self._error\n self._error=None\n raise e\n if self._parser is None :\n self.root=self._root\n if self._close_file:\n self._file.close()\n raise StopIteration\n \n del self._events[:]\n self._index=0\n data=self._file.read(16384)\n if data:\n try :\n self._parser.feed(data)\n except SyntaxError as exc:\n self._error=exc\n else :\n self._root=self._parser.close()\n self._parser=None\n \n def __iter__(self):\n return self\n \n \nXMLTreeBuilder=XMLParser\n\n\ntry :\n from ElementC14N import _serialize_c14n\n _serialize[\"c14n\"]=_serialize_c14n\nexcept ImportError:\n pass\n"], "genericpath": [".py", "''\n\n\n\n\nimport os\nimport stat\n\n__all__=['commonprefix','exists','getatime','getctime','getmtime',\n'getsize','isdir','isfile']\n\n\n\n\ndef exists(path):\n ''\n try :\n os.stat(path)\n except os.error:\n return False\n return True\n \n \n \n \ndef isfile(path):\n ''\n try :\n st=os.stat(path)\n except os.error:\n return False\n return stat.S_ISREG(st.st_mode)\n \n \n \n \n \ndef isdir(s):\n ''\n try :\n st=os.stat(s)\n except os.error:\n return False\n return stat.S_ISDIR(st.st_mode)\n \n \ndef getsize(filename):\n ''\n return os.stat(filename).st_size\n \n \ndef getmtime(filename):\n ''\n return os.stat(filename).st_mtime\n \n \ndef getatime(filename):\n ''\n return os.stat(filename).st_atime\n \n \ndef getctime(filename):\n ''\n return os.stat(filename).st_ctime\n \n \n \ndef commonprefix(m):\n ''\n if not m:return''\n s1=min(m)\n s2=max(m)\n for i,c in enumerate(s1):\n if c !=s2[i]:\n return s1[:i]\n return s1\n \n \n \n \n \n \n \n \ndef _splitext(p,sep,altsep,extsep):\n ''\n\n\n \n \n \n sepIndex=p.rfind(sep)\n if altsep:\n altsepIndex=p.rfind(altsep)\n sepIndex=max(sepIndex,altsepIndex)\n \n dotIndex=p.rfind(extsep)\n if dotIndex >sepIndex:\n \n filenameIndex=sepIndex+1\n while filenameIndex 
\",window.navigator.platform\n \ndef machine(*args,**kw):\n return''\n \ndef node(*args,**kw):\n return''\n \ndef platform(*args,**kw):\n return window.navigator.platform\n \ndef processor(*args,**kw):\n return''\n \ndef python_build():\n return ('.'.join(map(str,__BRYTHON__.implementation[:-1])),\n __BRYTHON__.compiled_date)\n \ndef python_compiler():\n return''\n \ndef python_branch():\n return''\n \ndef python_implementation():\n return'Brython'\n \ndef python_revision():\n return''\n \ndef python_version():\n return'.'.join(map(str,__BRYTHON__.version_info[:3]))\n \ndef python_version_tuple():\n return __BRYTHON__.version_info[:3]\n \ndef release():\n return''\n \ndef system():\n return window.navigator.platform\n \ndef system_alias(*args,**kw):\n return window.navigator.platform\n \ndef uname():\n from collections import namedtuple\n klass=namedtuple('uname_result',\n 'system node release version machine processor')\n return klass(window.navigator.platform,'','','','','')"], "xml.sax.expatreader": [".py", "''\n\n\n\n\nversion=\"0.20\"\n\nfrom xml.sax._exceptions import *\nfrom xml.sax.handler import feature_validation,feature_namespaces\nfrom xml.sax.handler import feature_namespace_prefixes\nfrom xml.sax.handler import feature_external_ges,feature_external_pes\nfrom xml.sax.handler import feature_string_interning\nfrom xml.sax.handler import property_xml_string,property_interning_dict\n\n\nimport sys\nif sys.platform[:4]==\"java\":\n raise SAXReaderNotAvailable(\"expat not available in Java\",None )\ndel sys\n\ntry :\n from xml.parsers import expat\nexcept ImportError:\n raise SAXReaderNotAvailable(\"expat not supported\",None )\nelse :\n if not hasattr(expat,\"ParserCreate\"):\n raise SAXReaderNotAvailable(\"expat not supported\",None )\nfrom xml.sax import xmlreader,saxutils,handler\n\nAttributesImpl=xmlreader.AttributesImpl\nAttributesNSImpl=xmlreader.AttributesNSImpl\n\n\n\n\ntry :\n import _weakref\nexcept ImportError:\n def _mkproxy(o):\n return o\nelse :\n import weakref\n _mkproxy=weakref.proxy\n del weakref,_weakref\n \n \n \nclass ExpatLocator(xmlreader.Locator):\n ''\n\n\n\n \n def __init__(self,parser):\n self._ref=_mkproxy(parser)\n \n def getColumnNumber(self):\n parser=self._ref\n if parser._parser is None :\n return None\n return parser._parser.ErrorColumnNumber\n \n def getLineNumber(self):\n parser=self._ref\n if parser._parser is None :\n return 1\n return parser._parser.ErrorLineNumber\n \n def getPublicId(self):\n parser=self._ref\n if parser is None :\n return None\n return parser._source.getPublicId()\n \n def getSystemId(self):\n parser=self._ref\n if parser is None :\n return None\n return parser._source.getSystemId()\n \n \n \n \nclass ExpatParser(xmlreader.IncrementalParser,xmlreader.Locator):\n ''\n \n def __init__(self,namespaceHandling=0,bufsize=2 **16 -20):\n xmlreader.IncrementalParser.__init__(self,bufsize)\n self._source=xmlreader.InputSource()\n self._parser=None\n self._namespaces=namespaceHandling\n self._lex_handler_prop=None\n self._parsing=0\n self._entity_stack=[]\n self._external_ges=1\n self._interning=None\n \n \n \n def parse(self,source):\n ''\n source=saxutils.prepare_input_source(source)\n \n self._source=source\n self.reset()\n self._cont_handler.setDocumentLocator(ExpatLocator(self))\n xmlreader.IncrementalParser.parse(self,source)\n \n def prepareParser(self,source):\n if source.getSystemId()is not None :\n self._parser.SetBase(source.getSystemId())\n \n \n \n def setContentHandler(self,handler):\n 
xmlreader.IncrementalParser.setContentHandler(self,handler)\n if self._parsing:\n self._reset_cont_handler()\n \n def getFeature(self,name):\n if name ==feature_namespaces:\n return self._namespaces\n elif name ==feature_string_interning:\n return self._interning is not None\n elif name in (feature_validation,feature_external_pes,\n feature_namespace_prefixes):\n return 0\n elif name ==feature_external_ges:\n return self._external_ges\n raise SAXNotRecognizedException(\"Feature '%s' not recognized\"%name)\n \n def setFeature(self,name,state):\n if self._parsing:\n raise SAXNotSupportedException(\"Cannot set features while parsing\")\n \n if name ==feature_namespaces:\n self._namespaces=state\n elif name ==feature_external_ges:\n self._external_ges=state\n elif name ==feature_string_interning:\n if state:\n if self._interning is None :\n self._interning={}\n else :\n self._interning=None\n elif name ==feature_validation:\n if state:\n raise SAXNotSupportedException(\n \"expat does not support validation\")\n elif name ==feature_external_pes:\n if state:\n raise SAXNotSupportedException(\n \"expat does not read external parameter entities\")\n elif name ==feature_namespace_prefixes:\n if state:\n raise SAXNotSupportedException(\n \"expat does not report namespace prefixes\")\n else :\n raise SAXNotRecognizedException(\n \"Feature '%s' not recognized\"%name)\n \n def getProperty(self,name):\n if name ==handler.property_lexical_handler:\n return self._lex_handler_prop\n elif name ==property_interning_dict:\n return self._interning\n elif name ==property_xml_string:\n if self._parser:\n if hasattr(self._parser,\"GetInputContext\"):\n return self._parser.GetInputContext()\n else :\n raise SAXNotRecognizedException(\n \"This version of expat does not support getting\"\n \" the XML string\")\n else :\n raise SAXNotSupportedException(\n \"XML string cannot be returned when not parsing\")\n raise SAXNotRecognizedException(\"Property '%s' not recognized\"%name)\n \n def setProperty(self,name,value):\n if name ==handler.property_lexical_handler:\n self._lex_handler_prop=value\n if self._parsing:\n self._reset_lex_handler_prop()\n elif name ==property_interning_dict:\n self._interning=value\n elif name ==property_xml_string:\n raise SAXNotSupportedException(\"Property '%s' cannot be set\"%\n name)\n else :\n raise SAXNotRecognizedException(\"Property '%s' not recognized\"%\n name)\n \n \n \n def feed(self,data,isFinal=0):\n if not self._parsing:\n self.reset()\n self._parsing=1\n self._cont_handler.startDocument()\n \n try :\n \n \n \n \n self._parser.Parse(data,isFinal)\n except expat.error as e:\n exc=SAXParseException(expat.ErrorString(e.code),e,self)\n \n self._err_handler.fatalError(exc)\n \n def close(self):\n if self._entity_stack:\n \n return\n self.feed(\"\",isFinal=1)\n self._cont_handler.endDocument()\n self._parsing=0\n \n self._parser=None\n bs=self._source.getByteStream()\n if bs is not None :\n bs.close()\n \n def _reset_cont_handler(self):\n self._parser.ProcessingInstructionHandler= self._cont_handler.processingInstruction\n self._parser.CharacterDataHandler=self._cont_handler.characters\n \n def _reset_lex_handler_prop(self):\n lex=self._lex_handler_prop\n parser=self._parser\n if lex is None :\n parser.CommentHandler=None\n parser.StartCdataSectionHandler=None\n parser.EndCdataSectionHandler=None\n parser.StartDoctypeDeclHandler=None\n parser.EndDoctypeDeclHandler=None\n else :\n parser.CommentHandler=lex.comment\n parser.StartCdataSectionHandler=lex.startCDATA\n 
parser.EndCdataSectionHandler=lex.endCDATA\n parser.StartDoctypeDeclHandler=self.start_doctype_decl\n parser.EndDoctypeDeclHandler=lex.endDTD\n \n def reset(self):\n if self._namespaces:\n self._parser=expat.ParserCreate(self._source.getEncoding(),\" \",\n intern=self._interning)\n self._parser.namespace_prefixes=1\n self._parser.StartElementHandler=self.start_element_ns\n self._parser.EndElementHandler=self.end_element_ns\n else :\n self._parser=expat.ParserCreate(self._source.getEncoding(),\n intern=self._interning)\n self._parser.StartElementHandler=self.start_element\n self._parser.EndElementHandler=self.end_element\n \n self._reset_cont_handler()\n self._parser.UnparsedEntityDeclHandler=self.unparsed_entity_decl\n self._parser.NotationDeclHandler=self.notation_decl\n self._parser.StartNamespaceDeclHandler=self.start_namespace_decl\n self._parser.EndNamespaceDeclHandler=self.end_namespace_decl\n \n self._decl_handler_prop=None\n if self._lex_handler_prop:\n self._reset_lex_handler_prop()\n \n \n \n self._parser.ExternalEntityRefHandler=self.external_entity_ref\n try :\n self._parser.SkippedEntityHandler=self.skipped_entity_handler\n except AttributeError:\n \n pass\n self._parser.SetParamEntityParsing(\n expat.XML_PARAM_ENTITY_PARSING_UNLESS_STANDALONE)\n \n self._parsing=0\n self._entity_stack=[]\n \n \n \n def getColumnNumber(self):\n if self._parser is None :\n return None\n return self._parser.ErrorColumnNumber\n \n def getLineNumber(self):\n if self._parser is None :\n return 1\n return self._parser.ErrorLineNumber\n \n def getPublicId(self):\n return self._source.getPublicId()\n \n def getSystemId(self):\n return self._source.getSystemId()\n \n \n def start_element(self,name,attrs):\n self._cont_handler.startElement(name,AttributesImpl(attrs))\n \n def end_element(self,name):\n self._cont_handler.endElement(name)\n \n def start_element_ns(self,name,attrs):\n pair=name.split()\n if len(pair)==1:\n \n pair=(None ,name)\n elif len(pair)==3:\n pair=pair[0],pair[1]\n else :\n \n pair=tuple(pair)\n \n newattrs={}\n qnames={}\n for (aname,value)in attrs.items():\n parts=aname.split()\n length=len(parts)\n if length ==1:\n \n qname=aname\n apair=(None ,aname)\n elif length ==3:\n qname=\"%s:%s\"%(parts[2],parts[1])\n apair=parts[0],parts[1]\n else :\n \n qname=parts[1]\n apair=tuple(parts)\n \n newattrs[apair]=value\n qnames[apair]=qname\n \n self._cont_handler.startElementNS(pair,None ,\n AttributesNSImpl(newattrs,qnames))\n \n def end_element_ns(self,name):\n pair=name.split()\n if len(pair)==1:\n pair=(None ,name)\n elif len(pair)==3:\n pair=pair[0],pair[1]\n else :\n pair=tuple(pair)\n \n self._cont_handler.endElementNS(pair,None )\n \n \n def processing_instruction(self,target,data):\n self._cont_handler.processingInstruction(target,data)\n \n \n def character_data(self,data):\n self._cont_handler.characters(data)\n \n def start_namespace_decl(self,prefix,uri):\n self._cont_handler.startPrefixMapping(prefix,uri)\n \n def end_namespace_decl(self,prefix):\n self._cont_handler.endPrefixMapping(prefix)\n \n def start_doctype_decl(self,name,sysid,pubid,has_internal_subset):\n self._lex_handler_prop.startDTD(name,pubid,sysid)\n \n def unparsed_entity_decl(self,name,base,sysid,pubid,notation_name):\n self._dtd_handler.unparsedEntityDecl(name,pubid,sysid,notation_name)\n \n def notation_decl(self,name,base,sysid,pubid):\n self._dtd_handler.notationDecl(name,pubid,sysid)\n \n def external_entity_ref(self,context,base,sysid,pubid):\n if not self._external_ges:\n return 1\n \n 
source=self._ent_handler.resolveEntity(pubid,sysid)\n source=saxutils.prepare_input_source(source,\n self._source.getSystemId()or\n \"\")\n \n self._entity_stack.append((self._parser,self._source))\n self._parser=self._parser.ExternalEntityParserCreate(context)\n self._source=source\n \n try :\n xmlreader.IncrementalParser.parse(self,source)\n except :\n return 0\n \n (self._parser,self._source)=self._entity_stack[-1]\n del self._entity_stack[-1]\n return 1\n \n def skipped_entity_handler(self,name,is_pe):\n if is_pe:\n \n name='%'+name\n self._cont_handler.skippedEntity(name)\n \n \n \ndef create_parser(*args,**kwargs):\n return ExpatParser(*args,**kwargs)\n \n \n \nif __name__ ==\"__main__\":\n import xml.sax.saxutils\n p=create_parser()\n p.setContentHandler(xml.sax.saxutils.XMLGenerator())\n p.setErrorHandler(xml.sax.ErrorHandler())\n p.parse(\"http://www.ibiblio.org/xml/examples/shakespeare/hamlet.xml\")\n"], "urllib.parse": [".py", "''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\nimport re\nimport sys\nimport collections\n\n__all__=[\"urlparse\",\"urlunparse\",\"urljoin\",\"urldefrag\",\n\"urlsplit\",\"urlunsplit\",\"urlencode\",\"parse_qs\",\n\"parse_qsl\",\"quote\",\"quote_plus\",\"quote_from_bytes\",\n\"unquote\",\"unquote_plus\",\"unquote_to_bytes\"]\n\n\nuses_relative=['ftp','http','gopher','nntp','imap',\n'wais','file','https','shttp','mms',\n'prospero','rtsp','rtspu','','sftp',\n'svn','svn+ssh']\nuses_netloc=['ftp','http','gopher','nntp','telnet',\n'imap','wais','file','mms','https','shttp',\n'snews','prospero','rtsp','rtspu','rsync','',\n'svn','svn+ssh','sftp','nfs','git','git+ssh']\nuses_params=['ftp','hdl','prospero','http','imap',\n'https','shttp','rtsp','rtspu','sip','sips',\n'mms','','sftp','tel']\n\n\n\nnon_hierarchical=['gopher','hdl','mailto','news',\n'telnet','wais','imap','snews','sip','sips']\nuses_query=['http','wais','imap','https','shttp','mms',\n'gopher','rtsp','rtspu','sip','sips','']\nuses_fragment=['ftp','hdl','http','gopher','news',\n'nntp','wais','https','shttp','snews',\n'file','prospero','']\n\n\nscheme_chars=('abcdefghijklmnopqrstuvwxyz'\n'ABCDEFGHIJKLMNOPQRSTUVWXYZ'\n'0123456789'\n'+-.')\n\n\nMAX_CACHE_SIZE=20\n_parse_cache={}\n\ndef clear_cache():\n ''\n _parse_cache.clear()\n _safe_quoters.clear()\n \n \n \n \n \n \n \n \n_implicit_encoding='ascii'\n_implicit_errors='strict'\n\ndef _noop(obj):\n return obj\n \ndef _encode_result(obj,encoding=_implicit_encoding,\nerrors=_implicit_errors):\n return obj.encode(encoding,errors)\n \ndef _decode_args(args,encoding=_implicit_encoding,\nerrors=_implicit_errors):\n return tuple(x.decode(encoding,errors)if x else''for x in args)\n \ndef _coerce_args(*args):\n\n\n\n\n\n str_input=isinstance(args[0],str)\n for arg in args[1:]:\n \n \n if arg and isinstance(arg,str)!=str_input:\n raise TypeError(\"Cannot mix str and non-str arguments\")\n if str_input:\n return args+(_noop,)\n return _decode_args(args)+(_encode_result,)\n \n \nclass _ResultMixinStr(object):\n ''\n __slots__=()\n \n def encode(self,encoding='ascii',errors='strict'):\n return self._encoded_counterpart(*(x.encode(encoding,errors)for x in self))\n \n \nclass _ResultMixinBytes(object):\n ''\n __slots__=()\n \n def decode(self,encoding='ascii',errors='strict'):\n return self._decoded_counterpart(*(x.decode(encoding,errors)for x in self))\n \n \nclass _NetlocResultMixinBase(object):\n ''\n __slots__=()\n \n @property\n def username(self):\n return self._userinfo[0]\n \n @property\n def password(self):\n return self._userinfo[1]\n \n 
@property\n def hostname(self):\n hostname=self._hostinfo[0]\n if not hostname:\n hostname=None\n elif hostname is not None :\n hostname=hostname.lower()\n return hostname\n \n @property\n def port(self):\n port=self._hostinfo[1]\n if port is not None :\n port=int(port,10)\n \n if not (0 <=port <=65535):\n return None\n return port\n \n \nclass _NetlocResultMixinStr(_NetlocResultMixinBase,_ResultMixinStr):\n __slots__=()\n \n @property\n def _userinfo(self):\n netloc=self.netloc\n userinfo,have_info,hostinfo=netloc.rpartition('@')\n if have_info:\n username,have_password,password=userinfo.partition(':')\n if not have_password:\n password=None\n else :\n username=password=None\n return username,password\n \n @property\n def _hostinfo(self):\n netloc=self.netloc\n _,_,hostinfo=netloc.rpartition('@')\n _,have_open_br,bracketed=hostinfo.partition('[')\n if have_open_br:\n hostname,_,port=bracketed.partition(']')\n _,have_port,port=port.partition(':')\n else :\n hostname,have_port,port=hostinfo.partition(':')\n if not have_port:\n port=None\n return hostname,port\n \n \nclass _NetlocResultMixinBytes(_NetlocResultMixinBase,_ResultMixinBytes):\n __slots__=()\n \n @property\n def _userinfo(self):\n netloc=self.netloc\n userinfo,have_info,hostinfo=netloc.rpartition(b'@')\n if have_info:\n username,have_password,password=userinfo.partition(b':')\n if not have_password:\n password=None\n else :\n username=password=None\n return username,password\n \n @property\n def _hostinfo(self):\n netloc=self.netloc\n _,_,hostinfo=netloc.rpartition(b'@')\n _,have_open_br,bracketed=hostinfo.partition(b'[')\n if have_open_br:\n hostname,_,port=bracketed.partition(b']')\n _,have_port,port=port.partition(b':')\n else :\n hostname,have_port,port=hostinfo.partition(b':')\n if not have_port:\n port=None\n return hostname,port\n \n \nfrom collections import namedtuple\n\n_DefragResultBase=namedtuple('DefragResult','url fragment')\n_SplitResultBase=namedtuple('SplitResult','scheme netloc path query fragment')\n_ParseResultBase=namedtuple('ParseResult','scheme netloc path params query fragment')\n\n\n\n\nResultBase=_NetlocResultMixinStr\n\n\nclass DefragResult(_DefragResultBase,_ResultMixinStr):\n __slots__=()\n def geturl(self):\n if self.fragment:\n return self.url+'#'+self.fragment\n else :\n return self.url\n \nclass SplitResult(_SplitResultBase,_NetlocResultMixinStr):\n __slots__=()\n def geturl(self):\n return urlunsplit(self)\n \nclass ParseResult(_ParseResultBase,_NetlocResultMixinStr):\n __slots__=()\n def geturl(self):\n return urlunparse(self)\n \n \nclass DefragResultBytes(_DefragResultBase,_ResultMixinBytes):\n __slots__=()\n def geturl(self):\n if self.fragment:\n return self.url+b'#'+self.fragment\n else :\n return self.url\n \nclass SplitResultBytes(_SplitResultBase,_NetlocResultMixinBytes):\n __slots__=()\n def geturl(self):\n return urlunsplit(self)\n \nclass ParseResultBytes(_ParseResultBase,_NetlocResultMixinBytes):\n __slots__=()\n def geturl(self):\n return urlunparse(self)\n \n \ndef _fix_result_transcoding():\n _result_pairs=(\n (DefragResult,DefragResultBytes),\n (SplitResult,SplitResultBytes),\n (ParseResult,ParseResultBytes),\n )\n for _decoded,_encoded in _result_pairs:\n _decoded._encoded_counterpart=_encoded\n _encoded._decoded_counterpart=_decoded\n \n_fix_result_transcoding()\ndel _fix_result_transcoding\n\ndef urlparse(url,scheme='',allow_fragments=True ):\n ''\n\n\n\n \n url,scheme,_coerce_result=_coerce_args(url,scheme)\n splitresult=urlsplit(url,scheme,allow_fragments)\n 
scheme,netloc,url,query,fragment=splitresult\n if scheme in uses_params and';'in url:\n url,params=_splitparams(url)\n else :\n params=''\n result=ParseResult(scheme,netloc,url,params,query,fragment)\n return _coerce_result(result)\n \ndef _splitparams(url):\n if'/'in url:\n i=url.find(';',url.rfind('/'))\n if i <0:\n return url,''\n else :\n i=url.find(';')\n return url[:i],url[i+1:]\n \ndef _splitnetloc(url,start=0):\n delim=len(url)\n for c in'/?#':\n wdelim=url.find(c,start)\n if wdelim >=0:\n delim=min(delim,wdelim)\n return url[start:delim],url[delim:]\n \ndef urlsplit(url,scheme='',allow_fragments=True ):\n ''\n\n\n\n \n url,scheme,_coerce_result=_coerce_args(url,scheme)\n allow_fragments=bool(allow_fragments)\n key=url,scheme,allow_fragments,type(url),type(scheme)\n cached=_parse_cache.get(key,None )\n if cached:\n return _coerce_result(cached)\n if len(_parse_cache)>=MAX_CACHE_SIZE:\n clear_cache()\n netloc=query=fragment=''\n i=url.find(':')\n if i >0:\n if url[:i]=='http':\n scheme=url[:i].lower()\n url=url[i+1:]\n if url[:2]=='//':\n netloc,url=_splitnetloc(url,2)\n if (('['in netloc and']'not in netloc)or\n (']'in netloc and'['not in netloc)):\n raise ValueError(\"Invalid IPv6 URL\")\n if allow_fragments and'#'in url:\n url,fragment=url.split('#',1)\n if'?'in url:\n url,query=url.split('?',1)\n v=SplitResult(scheme,netloc,url,query,fragment)\n _parse_cache[key]=v\n return _coerce_result(v)\n for c in url[:i]:\n if c not in scheme_chars:\n break\n else :\n \n \n rest=url[i+1:]\n if not rest or any(c not in'0123456789'for c in rest):\n \n scheme,url=url[:i].lower(),rest\n \n if url[:2]=='//':\n netloc,url=_splitnetloc(url,2)\n if (('['in netloc and']'not in netloc)or\n (']'in netloc and'['not in netloc)):\n raise ValueError(\"Invalid IPv6 URL\")\n if allow_fragments and'#'in url:\n url,fragment=url.split('#',1)\n if'?'in url:\n url,query=url.split('?',1)\n v=SplitResult(scheme,netloc,url,query,fragment)\n _parse_cache[key]=v\n return _coerce_result(v)\n \ndef urlunparse(components):\n ''\n\n\n \n scheme,netloc,url,params,query,fragment,_coerce_result=(\n _coerce_args(*components))\n if params:\n url=\"%s;%s\"%(url,params)\n return _coerce_result(urlunsplit((scheme,netloc,url,query,fragment)))\n \ndef urlunsplit(components):\n ''\n\n\n\n \n scheme,netloc,url,query,fragment,_coerce_result=(\n _coerce_args(*components))\n if netloc or (scheme and scheme in uses_netloc and url[:2]!='//'):\n if url and url[:1]!='/':url='/'+url\n url='//'+(netloc or'')+url\n if scheme:\n url=scheme+':'+url\n if query:\n url=url+'?'+query\n if fragment:\n url=url+'#'+fragment\n return _coerce_result(url)\n \ndef urljoin(base,url,allow_fragments=True ):\n ''\n \n if not base:\n return url\n if not url:\n return base\n base,url,_coerce_result=_coerce_args(base,url)\n bscheme,bnetloc,bpath,bparams,bquery,bfragment= urlparse(base,'',allow_fragments)\n scheme,netloc,path,params,query,fragment= urlparse(url,bscheme,allow_fragments)\n if scheme !=bscheme or scheme not in uses_relative:\n return _coerce_result(url)\n if scheme in uses_netloc:\n if netloc:\n return _coerce_result(urlunparse((scheme,netloc,path,\n params,query,fragment)))\n netloc=bnetloc\n if path[:1]=='/':\n return _coerce_result(urlunparse((scheme,netloc,path,\n params,query,fragment)))\n if not path and not params:\n path=bpath\n params=bparams\n if not query:\n query=bquery\n return _coerce_result(urlunparse((scheme,netloc,path,\n params,query,fragment)))\n segments=bpath.split('/')[:-1]+path.split('/')\n \n if segments[-1]=='.':\n 
segments[-1]=''\n while'.'in segments:\n segments.remove('.')\n while 1:\n i=1\n n=len(segments)-1\n while i =2 and segments[-1]=='..':\n segments[-2:]=['']\n return _coerce_result(urlunparse((scheme,netloc,'/'.join(segments),\n params,query,fragment)))\n \ndef urldefrag(url):\n ''\n\n\n\n\n \n url,_coerce_result=_coerce_args(url)\n if'#'in url:\n s,n,p,a,q,frag=urlparse(url)\n defrag=urlunparse((s,n,p,a,q,''))\n else :\n frag=''\n defrag=url\n return _coerce_result(DefragResult(defrag,frag))\n \n_hexdig='0123456789ABCDEFabcdef'\n_hextobyte={(a+b).encode():bytes([int(a+b,16)])\nfor a in _hexdig for b in _hexdig}\n\ndef unquote_to_bytes(string):\n ''\n \n \n if not string:\n \n string.split\n return b''\n if isinstance(string,str):\n string=string.encode('utf-8')\n bits=string.split(b'%')\n if len(bits)==1:\n return string\n res=[bits[0]]\n append=res.append\n for item in bits[1:]:\n try :\n append(_hextobyte[item[:2]])\n append(item[2:])\n except KeyError:\n append(b'%')\n append(item)\n return b''.join(res)\n \n_asciire=re.compile('([\\x00-\\x7f]+)')\n\ndef unquote(string,encoding='utf-8',errors='replace'):\n ''\n\n\n\n\n\n\n\n \n if'%'not in string:\n string.split\n return string\n if encoding is None :\n encoding='utf-8'\n if errors is None :\n errors='replace'\n bits=_asciire.split(string)\n res=[bits[0]]\n append=res.append\n for i in range(1,len(bits),2):\n append(unquote_to_bytes(bits[i]).decode(encoding,errors))\n append(bits[i+1])\n return''.join(res)\n \ndef parse_qs(qs,keep_blank_values=False ,strict_parsing=False ,\nencoding='utf-8',errors='replace'):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n parsed_result={}\n pairs=parse_qsl(qs,keep_blank_values,strict_parsing,\n encoding=encoding,errors=errors)\n for name,value in pairs:\n if name in parsed_result:\n parsed_result[name].append(value)\n else :\n parsed_result[name]=[value]\n return parsed_result\n \ndef parse_qsl(qs,keep_blank_values=False ,strict_parsing=False ,\nencoding='utf-8',errors='replace'):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n qs,_coerce_result=_coerce_args(qs)\n pairs=[s2 for s1 in qs.split('&')for s2 in s1.split(';')]\n r=[]\n for name_value in pairs:\n if not name_value and not strict_parsing:\n continue\n nv=name_value.split('=',1)\n if len(nv)!=2:\n if strict_parsing:\n raise ValueError(\"bad query field: %r\"%(name_value,))\n \n if keep_blank_values:\n nv.append('')\n else :\n continue\n if len(nv[1])or keep_blank_values:\n name=nv[0].replace('+',' ')\n name=unquote(name,encoding=encoding,errors=errors)\n name=_coerce_result(name)\n value=nv[1].replace('+',' ')\n value=unquote(value,encoding=encoding,errors=errors)\n value=_coerce_result(value)\n r.append((name,value))\n return r\n \ndef unquote_plus(string,encoding='utf-8',errors='replace'):\n ''\n\n\n\n \n string=string.replace('+',' ')\n return unquote(string,encoding,errors)\n \n_ALWAYS_SAFE=frozenset(b'ABCDEFGHIJKLMNOPQRSTUVWXYZ'\nb'abcdefghijklmnopqrstuvwxyz'\nb'0123456789'\nb'_.-')\n_ALWAYS_SAFE_BYTES=bytes(_ALWAYS_SAFE)\n_safe_quoters={}\n\nclass Quoter(collections.defaultdict):\n ''\n\n\n\n \n \n \n def __init__(self,safe):\n ''\n self.safe=_ALWAYS_SAFE.union(safe)\n \n def __repr__(self):\n \n return\"\"%dict(self)\n \n def __missing__(self,b):\n \n res=chr(b)if b in self.safe else'%{:02X}'.format(b)\n self[b]=res\n return res\n \ndef quote(string,safe='/',encoding=None ,errors=None ):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if isinstance(string,str):\n if not string:\n return string\n if encoding is None :\n 
encoding='utf-8'\n if errors is None :\n errors='strict'\n string=string.encode(encoding,errors)\n else :\n if encoding is not None :\n raise TypeError(\"quote() doesn't support 'encoding' for bytes\")\n if errors is not None :\n raise TypeError(\"quote() doesn't support 'errors' for bytes\")\n return quote_from_bytes(string,safe)\n \ndef quote_plus(string,safe='',encoding=None ,errors=None ):\n ''\n\n\n \n \n \n if ((isinstance(string,str)and' 'not in string)or\n (isinstance(string,bytes)and b' 'not in string)):\n return quote(string,safe,encoding,errors)\n if isinstance(safe,str):\n space=' '\n else :\n space=b' '\n string=quote(string,safe+space,encoding,errors)\n return string.replace(' ','+')\n \ndef quote_from_bytes(bs,safe='/'):\n ''\n\n\n \n if not isinstance(bs,(bytes,bytearray)):\n raise TypeError(\"quote_from_bytes() expected bytes\")\n if not bs:\n return''\n if isinstance(safe,str):\n \n safe=safe.encode('ascii','ignore')\n else :\n safe=bytes([c for c in safe if c <128])\n if not bs.rstrip(_ALWAYS_SAFE_BYTES+safe):\n return bs.decode()\n try :\n quoter=_safe_quoters[safe]\n except KeyError:\n _safe_quoters[safe]=quoter=Quoter(safe).__getitem__\n return''.join([quoter(char)for char in bs])\n \ndef urlencode(query,doseq=False ,safe='',encoding=None ,errors=None ):\n ''\n\n\n\n\n\n\n\n\n\n\n\n \n \n if hasattr(query,\"items\"):\n query=query.items()\n else :\n \n \n try :\n \n \n if len(query)and not isinstance(query[0],tuple):\n raise TypeError\n \n \n \n \n except TypeError:\n ty,va,tb=sys.exc_info()\n raise TypeError(\"not a valid non-string sequence \"\n \"or mapping object\").with_traceback(tb)\n \n l=[]\n if not doseq:\n for k,v in query:\n if isinstance(k,bytes):\n k=quote_plus(k,safe)\n else :\n k=quote_plus(str(k),safe,encoding,errors)\n \n if isinstance(v,bytes):\n v=quote_plus(v,safe)\n else :\n v=quote_plus(str(v),safe,encoding,errors)\n l.append(k+'='+v)\n else :\n for k,v in query:\n if isinstance(k,bytes):\n k=quote_plus(k,safe)\n else :\n k=quote_plus(str(k),safe,encoding,errors)\n \n if isinstance(v,bytes):\n v=quote_plus(v,safe)\n l.append(k+'='+v)\n elif isinstance(v,str):\n v=quote_plus(v,safe,encoding,errors)\n l.append(k+'='+v)\n else :\n try :\n \n x=len(v)\n except TypeError:\n \n v=quote_plus(str(v),safe,encoding,errors)\n l.append(k+'='+v)\n else :\n \n for elt in v:\n if isinstance(elt,bytes):\n elt=quote_plus(elt,safe)\n else :\n elt=quote_plus(str(elt),safe,encoding,errors)\n l.append(k+'='+elt)\n return'&'.join(l)\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \ndef to_bytes(url):\n ''\n \n \n \n if isinstance(url,str):\n try :\n url=url.encode(\"ASCII\").decode()\n except UnicodeError:\n raise UnicodeError(\"URL \"+repr(url)+\n \" contains non-ASCII characters\")\n return url\n \ndef unwrap(url):\n ''\n url=str(url).strip()\n if url[:1]=='<'and url[-1:]=='>':\n url=url[1:-1].strip()\n if url[:4]=='URL:':url=url[4:].strip()\n return url\n \n_typeprog=None\ndef splittype(url):\n ''\n global _typeprog\n if _typeprog is None :\n import re\n _typeprog=re.compile('^([^/:]+):')\n \n match=_typeprog.match(url)\n if match:\n scheme=match.group(1)\n return scheme.lower(),url[len(scheme)+1:]\n return None ,url\n \n_hostprog=None\ndef splithost(url):\n ''\n global _hostprog\n if _hostprog is None :\n import re\n _hostprog=re.compile('^//([^/?]*)(.*)$')\n \n match=_hostprog.match(url)\n if match:\n host_port=match.group(1)\n path=match.group(2)\n if path and not path.startswith('/'):\n path='/'+path\n return host_port,path\n return None ,url\n 
\n_userprog=None\ndef splituser(host):\n ''\n global _userprog\n if _userprog is None :\n import re\n _userprog=re.compile('^(.*)@(.*)$')\n \n match=_userprog.match(host)\n if match:return match.group(1,2)\n return None ,host\n \n_passwdprog=None\ndef splitpasswd(user):\n ''\n global _passwdprog\n if _passwdprog is None :\n import re\n _passwdprog=re.compile('^([^:]*):(.*)$',re.S)\n \n match=_passwdprog.match(user)\n if match:return match.group(1,2)\n return user,None\n \n \n_portprog=None\ndef splitport(host):\n ''\n global _portprog\n if _portprog is None :\n import re\n _portprog=re.compile('^(.*):([0-9]+)$')\n \n match=_portprog.match(host)\n if match:return match.group(1,2)\n return host,None\n \n_nportprog=None\ndef splitnport(host,defport=-1):\n ''\n\n\n \n global _nportprog\n if _nportprog is None :\n import re\n _nportprog=re.compile('^(.*):(.*)$')\n \n match=_nportprog.match(host)\n if match:\n host,port=match.group(1,2)\n try :\n if not port:raise ValueError(\"no digits\")\n nport=int(port)\n except ValueError:\n nport=None\n return host,nport\n return host,defport\n \n_queryprog=None\ndef splitquery(url):\n ''\n global _queryprog\n if _queryprog is None :\n import re\n _queryprog=re.compile('^(.*)\\?([^?]*)$')\n \n match=_queryprog.match(url)\n if match:return match.group(1,2)\n return url,None\n \n_tagprog=None\ndef splittag(url):\n ''\n global _tagprog\n if _tagprog is None :\n import re\n _tagprog=re.compile('^(.*)#([^#]*)$')\n \n match=_tagprog.match(url)\n if match:return match.group(1,2)\n return url,None\n \ndef splitattr(url):\n ''\n \n words=url.split(';')\n return words[0],words[1:]\n \n_valueprog=None\ndef splitvalue(attr):\n ''\n global _valueprog\n if _valueprog is None :\n import re\n _valueprog=re.compile('^([^=]*)=(.*)$')\n \n match=_valueprog.match(attr)\n if match:return match.group(1,2)\n return attr,None\n"], "sre_compile": [".py", "\n\n\n\n\n\n\n\n\n\n\"\"\"Internal support module for sre\"\"\"\n\n\nimport sys\nimport _sre\nimport sre_parse\nfrom sre_constants import *\nfrom _sre import MAXREPEAT\n\n\nassert _sre.MAGIC ==MAGIC,\"SRE module mismatch\"\n\nif _sre.CODESIZE ==2:\n MAXCODE=65535\nelse :\n MAXCODE=0xFFFFFFFF\n \ndef _identityfunction(x):\n return x\n \n \n_LITERAL_CODES=set([LITERAL,NOT_LITERAL])\n_REPEATING_CODES=set([REPEAT,MIN_REPEAT,MAX_REPEAT])\n_SUCCESS_CODES=set([SUCCESS,FAILURE])\n_ASSERT_CODES=set([ASSERT,ASSERT_NOT])\n\ndef _compile(code,pattern,flags):\n\n emit=code.append\n _len=len\n LITERAL_CODES=_LITERAL_CODES\n REPEATING_CODES=_REPEATING_CODES\n SUCCESS_CODES=_SUCCESS_CODES\n ASSERT_CODES=_ASSERT_CODES\n for op,av in pattern:\n \n \n if op in LITERAL_CODES:\n if flags&SRE_FLAG_IGNORECASE:\n emit(OPCODES[OP_IGNORE[op]])\n emit(_sre.getlower(av,flags))\n else :\n emit(OPCODES[op])\n emit(av)\n elif op is IN:\n if flags&SRE_FLAG_IGNORECASE:\n emit(OPCODES[OP_IGNORE[op]])\n def fixup(literal,flags=flags):\n return _sre.getlower(literal,flags)\n else :\n emit(OPCODES[op])\n fixup=_identityfunction\n skip=_len(code);emit(0)\n _compile_charset(av,flags,code,fixup)\n code[skip]=_len(code)-skip\n elif op is ANY:\n if flags&SRE_FLAG_DOTALL:\n emit(OPCODES[ANY_ALL])\n else :\n emit(OPCODES[ANY])\n elif op in REPEATING_CODES:\n if flags&SRE_FLAG_TEMPLATE:\n raise error(\"internal: unsupported template operator\")\n emit(OPCODES[REPEAT])\n skip=_len(code);emit(0)\n emit(av[0])\n emit(av[1])\n _compile(code,av[2],flags)\n emit(OPCODES[SUCCESS])\n code[skip]=_len(code)-skip\n elif _simple(av)and op is not REPEAT:\n if op is MAX_REPEAT:\n 
emit(OPCODES[REPEAT_ONE])\n else :\n emit(OPCODES[MIN_REPEAT_ONE])\n skip=_len(code);emit(0)\n emit(av[0])\n emit(av[1])\n _compile(code,av[2],flags)\n emit(OPCODES[SUCCESS])\n code[skip]=_len(code)-skip\n else :\n emit(OPCODES[REPEAT])\n skip=_len(code);emit(0)\n emit(av[0])\n emit(av[1])\n _compile(code,av[2],flags)\n code[skip]=_len(code)-skip\n if op is MAX_REPEAT:\n emit(OPCODES[MAX_UNTIL])\n else :\n emit(OPCODES[MIN_UNTIL])\n elif op is SUBPATTERN:\n if av[0]:\n emit(OPCODES[MARK])\n emit((av[0]-1)*2)\n \n _compile(code,av[1],flags)\n if av[0]:\n emit(OPCODES[MARK])\n emit((av[0]-1)*2+1)\n elif op in SUCCESS_CODES:\n emit(OPCODES[op])\n elif op in ASSERT_CODES:\n emit(OPCODES[op])\n skip=_len(code);emit(0)\n if av[0]>=0:\n emit(0)\n else :\n lo,hi=av[1].getwidth()\n if lo !=hi:\n raise error(\"look-behind requires fixed-width pattern\")\n emit(lo)\n _compile(code,av[1],flags)\n emit(OPCODES[SUCCESS])\n code[skip]=_len(code)-skip\n elif op is CALL:\n emit(OPCODES[op])\n skip=_len(code);emit(0)\n _compile(code,av,flags)\n emit(OPCODES[SUCCESS])\n code[skip]=_len(code)-skip\n elif op is AT:\n emit(OPCODES[op])\n if flags&SRE_FLAG_MULTILINE:\n av=AT_MULTILINE.get(av,av)\n if flags&SRE_FLAG_LOCALE:\n av=AT_LOCALE.get(av,av)\n elif flags&SRE_FLAG_UNICODE:\n av=AT_UNICODE.get(av,av)\n emit(ATCODES[av])\n elif op is BRANCH:\n emit(OPCODES[op])\n tail=[]\n tailappend=tail.append\n for av in av[1]:\n skip=_len(code);emit(0)\n \n _compile(code,av,flags)\n emit(OPCODES[JUMP])\n tailappend(_len(code));emit(0)\n code[skip]=_len(code)-skip\n emit(0)\n for tail in tail:\n code[tail]=_len(code)-tail\n elif op is CATEGORY:\n emit(OPCODES[op])\n if flags&SRE_FLAG_LOCALE:\n av=CH_LOCALE[av]\n elif flags&SRE_FLAG_UNICODE:\n av=CH_UNICODE[av]\n emit(CHCODES[av])\n elif op is GROUPREF:\n if flags&SRE_FLAG_IGNORECASE:\n emit(OPCODES[OP_IGNORE[op]])\n else :\n emit(OPCODES[op])\n emit(av -1)\n elif op is GROUPREF_EXISTS:\n emit(OPCODES[op])\n emit(av[0]-1)\n skipyes=_len(code);emit(0)\n _compile(code,av[1],flags)\n if av[2]:\n emit(OPCODES[JUMP])\n skipno=_len(code);emit(0)\n code[skipyes]=_len(code)-skipyes+1\n _compile(code,av[2],flags)\n code[skipno]=_len(code)-skipno\n else :\n code[skipyes]=_len(code)-skipyes+1\n else :\n raise ValueError(\"unsupported operand type\",op)\n \ndef _compile_charset(charset,flags,code,fixup=None ):\n\n emit=code.append\n if fixup is None :\n fixup=_identityfunction\n for op,av in _optimize_charset(charset,fixup):\n emit(OPCODES[op])\n if op is NEGATE:\n pass\n elif op is LITERAL:\n emit(fixup(av))\n elif op is RANGE:\n emit(fixup(av[0]))\n emit(fixup(av[1]))\n elif op is CHARSET:\n code.extend(av)\n elif op is BIGCHARSET:\n code.extend(av)\n elif op is CATEGORY:\n if flags&SRE_FLAG_LOCALE:\n emit(CHCODES[CH_LOCALE[av]])\n elif flags&SRE_FLAG_UNICODE:\n emit(CHCODES[CH_UNICODE[av]])\n else :\n emit(CHCODES[av])\n else :\n raise error(\"internal: unsupported set operator\")\n emit(OPCODES[FAILURE])\n \n \ndef _optimize_charset(charset,fixup):\n\n out=[]\n outappend=out.append\n charmap=[0]*256\n try :\n for op,av in charset:\n if op is NEGATE:\n outappend((op,av))\n elif op is LITERAL:\n charmap[fixup(av)]=1\n elif op is RANGE:\n for i in range(fixup(av[0]),fixup(av[1])+1):\n charmap[i]=1\n elif op is CATEGORY:\n \n return charset\n except IndexError:\n \n return _optimize_unicode(charset,fixup)\n \n i=p=n=0\n runs=[]\n runsappend=runs.append\n for c in charmap:\n if c:\n if n ==0:\n p=i\n n=n+1\n elif n:\n runsappend((p,n))\n n=0\n i=i+1\n if n:\n runsappend((p,n))\n if 
len(runs)<=2:\n \n for p,n in runs:\n if n ==1:\n outappend((LITERAL,p))\n else :\n outappend((RANGE,(p,p+n -1)))\n if len(out)MAXCODE:\n dataappend(v)\n m,v=start\n return data\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \ndef _optimize_unicode(charset,fixup):\n try :\n import array\n except ImportError:\n return charset\n charmap=[0]*65536\n negate=0\n try :\n for op,av in charset:\n if op is NEGATE:\n negate=1\n elif op is LITERAL:\n charmap[fixup(av)]=1\n elif op is RANGE:\n for i in range(fixup(av[0]),fixup(av[1])+1):\n charmap[i]=1\n elif op is CATEGORY:\n \n return charset\n except IndexError:\n \n return charset\n if negate:\n if sys.maxunicode !=65535:\n \n \n \n return charset\n for i in range(65536):\n charmap[i]=not charmap[i]\n comps={}\n mapping=[0]*256\n block=0\n data=[]\n for i in range(256):\n chunk=tuple(charmap[i *256:(i+1)*256])\n new=comps.setdefault(chunk,block)\n mapping[i]=new\n if new ==block:\n block=block+1\n data=data+_mk_bitmap(chunk)\n header=[block]\n if _sre.CODESIZE ==2:\n code='H'\n else :\n code='I'\n \n mapping=array.array('b',mapping).tobytes()\n \n mapping=array.array(code,mapping)\n assert mapping.itemsize ==_sre.CODESIZE\n assert len(mapping)*mapping.itemsize ==256\n header=header+mapping.tolist()\n data[0:0]=header\n return [(BIGCHARSET,data)]\n \ndef _simple(av):\n\n lo,hi=av[2].getwidth()\n if lo ==0 and hi ==MAXREPEAT:\n raise error(\"nothing to repeat\")\n return lo ==hi ==1 and av[2][0][0]!=SUBPATTERN\n \ndef _compile_info(code,pattern,flags):\n\n\n\n lo,hi=pattern.getwidth()\n \n if lo ==0:\n return\n \n prefix=[]\n prefixappend=prefix.append\n prefix_skip=0\n charset=[]\n charsetappend=charset.append\n if not (flags&SRE_FLAG_IGNORECASE):\n \n for op,av in pattern.data:\n \n if op is LITERAL:\n if len(prefix)==prefix_skip:\n prefix_skip=prefix_skip+1\n prefixappend(av)\n elif op is SUBPATTERN and len(av[1])==1:\n op,av=av[1][0]\n if op is LITERAL:\n prefixappend(av)\n else :\n break\n else :\n break\n \n if not prefix and pattern.data:\n op,av=pattern.data[0]\n if op is SUBPATTERN and av[1]:\n op,av=av[1][0]\n if op is LITERAL:\n charsetappend((op,av))\n elif op is BRANCH:\n c=[]\n cappend=c.append\n for p in av[1]:\n if not p:\n break\n op,av=p[0]\n if op is LITERAL:\n cappend((op,av))\n else :\n break\n else :\n charset=c\n elif op is BRANCH:\n c=[]\n cappend=c.append\n for p in av[1]:\n if not p:\n break\n op,av=p[0]\n if op is LITERAL:\n cappend((op,av))\n else :\n break\n else :\n charset=c\n elif op is IN:\n charset=av\n \n \n \n \n \n \n \n emit=code.append\n emit(OPCODES[INFO])\n skip=len(code);emit(0)\n \n mask=0\n if prefix:\n mask=SRE_INFO_PREFIX\n if len(prefix)==prefix_skip ==len(pattern.data):\n mask=mask+SRE_INFO_LITERAL\n elif charset:\n mask=mask+SRE_INFO_CHARSET\n emit(mask)\n \n if lo 0 and prefix[i]!=prefix[table[i+1]-1]:\n table[i+1]=table[table[i+1]-1]+1\n code.extend(table[1:])\n elif charset:\n _compile_charset(charset,flags,code)\n code[skip]=len(code)-skip\n \ndef isstring(obj):\n return isinstance(obj,(str,bytes))\n \ndef _code(p,flags):\n\n flags=p.pattern.flags |flags\n code=[]\n \n \n _compile_info(code,p,flags)\n \n \n _compile(code,p.data,flags)\n \n code.append(OPCODES[SUCCESS])\n \n return code\n \ndef compile(p,flags=0):\n\n\n\n if isstring(p):\n pattern=p\n p=sre_parse.parse(p,flags)\n else :\n pattern=None\n \n \n code=_code(p,flags)\n \n \n \n \n \n if p.pattern.groups >100:\n raise AssertionError(\n \"sorry, but this version only supports 100 named groups\"\n )\n \n \n 
groupindex=p.pattern.groupdict\n indexgroup=[None ]*p.pattern.groups\n for k,i in groupindex.items():\n indexgroup[i]=k\n \n return _sre.compile(\n pattern,flags |p.pattern.flags,code,\n p.pattern.groups -1,\n groupindex,indexgroup\n )\n"], "browser.ajax": [".py", "from _ajax import *\n\nclass Ajax:\n\n def __init__(self,url,**kw):\n self.req=ajax1()\n method=kw.get('method','GET')\n self.req.open(method,url,kw.get('async',True ))\n \n def send(self,data=None ):\n if data is not None :\n self.req.send(data)\n else :\n self.req.send()\n \n def set_header(self,key,value):\n self.req.set_header(key,value)\n \n def set_timeout(self,seconds,func):\n self.req.set_timeout(seconds,func)\n \n @property\n def status(self):\n return self.req.status\n \n def complete(self,f):\n ''\n self.req.bind(\"complete\",f)\n return f\n \n @property\n def text(self):\n return self.req.responseText"], "_testcapi": [".py", "\nCHAR_MAX=127\n\nCHAR_MIN=-128\n\nDBL_MAX=1.7976931348623157e+308\n\nDBL_MIN=2.2250738585072014e-308\n\nFLT_MAX=3.4028234663852886e+38\n\nFLT_MIN=1.1754943508222875e-38\n\nINT_MAX=2147483647\n\nINT_MIN=-2147483648\n\nLLONG_MAX=9223372036854775807\n\nLLONG_MIN=-9223372036854775808\n\nLONG_MAX=2147483647\n\nLONG_MIN=-2147483648\n\nPY_SSIZE_T_MAX=2147483647\n\nPY_SSIZE_T_MIN=-2147483648\n\nSHRT_MAX=32767\n\nSHRT_MIN=-32768\n\nSIZEOF_PYGC_HEAD=16\n\nUCHAR_MAX=255\n\nUINT_MAX=4294967295\n\nULLONG_MAX=18446744073709551615\n\nULONG_MAX=4294967295\n\nUSHRT_MAX=65535\n\n__loader__=\"<_frozen_importlib.ExtensionFileLoader object at 0x00C98DD0>\"\n\ndef _pending_threadfunc(*args,**kw):\n pass\n \nclass _test_structmembersType(object):\n pass\n \ndef _test_thread_state(*args,**kw):\n pass\n \ndef argparsing(*args,**kw):\n pass\n \ndef code_newempty(*args,**kw):\n pass\n \ndef codec_incrementaldecoder(*args,**kw):\n pass\n \ndef codec_incrementalencoder(*args,**kw):\n pass\n \ndef crash_no_current_thread(*args,**kw):\n pass\n \nclass error(Exception):\n pass\n \ndef exception_print(*args,**kw):\n pass\n \ndef getargs_B(*args,**kw):\n pass\n \ndef getargs_H(*args,**kw):\n pass\n \ndef getargs_I(*args,**kw):\n pass\n \ndef getargs_K(*args,**kw):\n pass\n \ndef getargs_L(*args,**kw):\n pass\n \ndef getargs_Z(*args,**kw):\n pass\n \ndef getargs_Z_hash(*args,**kw):\n pass\n \ndef getargs_b(*args,**kw):\n pass\n \ndef getargs_c(*args,**kw):\n pass\n \ndef getargs_h(*args,**kw):\n pass\n \ndef getargs_i(*args,**kw):\n pass\n \ndef getargs_k(*args,**kw):\n pass\n \ndef getargs_keyword_only(*args,**kw):\n pass\n \ndef getargs_keywords(*args,**kw):\n pass\n \ndef getargs_l(*args,**kw):\n pass\n \ndef getargs_n(*args,**kw):\n pass\n \ndef getargs_p(*args,**kw):\n pass\n \ndef getargs_s(*args,**kw):\n pass\n \ndef getargs_s_hash(*args,**kw):\n pass\n \ndef getargs_s_star(*args,**kw):\n pass\n \ndef getargs_tuple(*args,**kw):\n pass\n \ndef getargs_u(*args,**kw):\n pass\n \ndef getargs_u_hash(*args,**kw):\n pass\n \ndef getargs_w_star(*args,**kw):\n pass\n \ndef getargs_y(*args,**kw):\n pass\n \ndef getargs_y_hash(*args,**kw):\n pass\n \ndef getargs_y_star(*args,**kw):\n pass\n \ndef getargs_z(*args,**kw):\n pass\n \ndef getargs_z_hash(*args,**kw):\n pass\n \ndef getargs_z_star(*args,**kw):\n pass\n \nclass instancemethod(object):\n pass\n \ndef make_exception_with_doc(*args,**kw):\n pass\n \ndef make_memoryview_from_NULL_pointer(*args,**kw):\n pass\n \ndef parse_tuple_and_keywords(*args,**kw):\n pass\n \ndef pytime_object_to_time_t(*args,**kw):\n pass\n \ndef pytime_object_to_timespec(*args,**kw):\n pass\n \ndef 
pytime_object_to_timeval(*args,**kw):\n pass\n \ndef raise_exception(*args,**kw):\n pass\n \ndef raise_memoryerror(*args,**kw):\n pass\n \ndef run_in_subinterp(*args,**kw):\n pass\n \ndef set_exc_info(*args,**kw):\n pass\n \ndef test_L_code(*args,**kw):\n pass\n \ndef test_Z_code(*args,**kw):\n pass\n \ndef test_capsule(*args,**kw):\n pass\n \ndef test_config(*args,**kw):\n pass\n \ndef test_datetime_capi(*args,**kw):\n pass\n \ndef test_dict_iteration(*args,**kw):\n pass\n \ndef test_empty_argparse(*args,**kw):\n pass\n \ndef test_k_code(*args,**kw):\n pass\n \ndef test_lazy_hash_inheritance(*args,**kw):\n pass\n \ndef test_list_api(*args,**kw):\n pass\n \ndef test_long_and_overflow(*args,**kw):\n pass\n \ndef test_long_api(*args,**kw):\n pass\n \ndef test_long_as_double(*args,**kw):\n pass\n \ndef test_long_as_size_t(*args,**kw):\n pass\n \ndef test_long_long_and_overflow(*args,**kw):\n pass\n \ndef test_long_numbits(*args,**kw):\n pass\n \ndef test_longlong_api(*args,**kw):\n pass\n \ndef test_null_strings(*args,**kw):\n pass\n \ndef test_s_code(*args,**kw):\n pass\n \ndef test_string_from_format(*args,**kw):\n pass\n \ndef test_string_to_double(*args,**kw):\n pass\n \ndef test_u_code(*args,**kw):\n pass\n \ndef test_unicode_compare_with_ascii(*args,**kw):\n pass\n \ndef test_widechar(*args,**kw):\n pass\n \ndef test_with_docstring(*args,**kw):\n ''\n pass\n \ndef traceback_print(*args,**kw):\n pass\n \ndef unicode_aswidechar(*args,**kw):\n pass\n \ndef unicode_aswidecharstring(*args,**kw):\n pass\n \ndef unicode_encodedecimal(*args,**kw):\n pass\n \ndef unicode_transformdecimaltoascii(*args,**kw):\n pass\n"], "jqueryui.jquery-ui.min": [".js", "/*! jQuery UI - v1.11.4 - 2015-03-11\n* http://jqueryui.com\n* Includes: core.js, widget.js, mouse.js, position.js, accordion.js, autocomplete.js, button.js, datepicker.js, dialog.js, draggable.js, droppable.js, effect.js, effect-blind.js, effect-bounce.js, effect-clip.js, effect-drop.js, effect-explode.js, effect-fade.js, effect-fold.js, effect-highlight.js, effect-puff.js, effect-pulsate.js, effect-scale.js, effect-shake.js, effect-size.js, effect-slide.js, effect-transfer.js, menu.js, progressbar.js, resizable.js, selectable.js, selectmenu.js, slider.js, sortable.js, spinner.js, tabs.js, tooltip.js\n* Copyright 2015 jQuery Foundation and other contributors; Licensed MIT */\n\n(function(e){\"function\"==typeof define&&define.amd?define([\"jquery\"],e):e(jQuery)})(function(e){function t(t,s){var n,a,o,r=t.nodeName.toLowerCase();return\"area\"===r?(n=t.parentNode,a=n.name,t.href&&a&&\"map\"===n.nodeName.toLowerCase()?(o=e(\"img[usemap='#\"+a+\"']\")[0],!!o&&i(o)):!1):(/^(input|select|textarea|button|object)$/.test(r)?!t.disabled:\"a\"===r?t.href||s:s)&&i(t)}function i(t){return e.expr.filters.visible(t)&&!e(t).parents().addBack().filter(function(){return\"hidden\"===e.css(this,\"visibility\")}).length}function s(e){for(var t,i;e.length&&e[0]!==document;){if(t=e.css(\"position\"),(\"absolute\"===t||\"relative\"===t||\"fixed\"===t)&&(i=parseInt(e.css(\"zIndex\"),10),!isNaN(i)&&0!==i))return i;e=e.parent()}return 0}function 
n(){this._curInst=null,this._keyEvent=!1,this._disabledInputs=[],this._datepickerShowing=!1,this._inDialog=!1,this._mainDivId=\"ui-datepicker-div\",this._inlineClass=\"ui-datepicker-inline\",this._appendClass=\"ui-datepicker-append\",this._triggerClass=\"ui-datepicker-trigger\",this._dialogClass=\"ui-datepicker-dialog\",this._disableClass=\"ui-datepicker-disabled\",this._unselectableClass=\"ui-datepicker-unselectable\",this._currentClass=\"ui-datepicker-current-day\",this._dayOverClass=\"ui-datepicker-days-cell-over\",this.regional=[],this.regional[\"\"]={closeText:\"Done\",prevText:\"Prev\",nextText:\"Next\",currentText:\"Today\",monthNames:[\"January\",\"February\",\"March\",\"April\",\"May\",\"June\",\"July\",\"August\",\"September\",\"October\",\"November\",\"December\"],monthNamesShort:[\"Jan\",\"Feb\",\"Mar\",\"Apr\",\"May\",\"Jun\",\"Jul\",\"Aug\",\"Sep\",\"Oct\",\"Nov\",\"Dec\"],dayNames:[\"Sunday\",\"Monday\",\"Tuesday\",\"Wednesday\",\"Thursday\",\"Friday\",\"Saturday\"],dayNamesShort:[\"Sun\",\"Mon\",\"Tue\",\"Wed\",\"Thu\",\"Fri\",\"Sat\"],dayNamesMin:[\"Su\",\"Mo\",\"Tu\",\"We\",\"Th\",\"Fr\",\"Sa\"],weekHeader:\"Wk\",dateFormat:\"mm/dd/yy\",firstDay:0,isRTL:!1,showMonthAfterYear:!1,yearSuffix:\"\"},this._defaults={showOn:\"focus\",showAnim:\"fadeIn\",showOptions:{},defaultDate:null,appendText:\"\",buttonText:\"...\",buttonImage:\"\",buttonImageOnly:!1,hideIfNoPrevNext:!1,navigationAsDateFormat:!1,gotoCurrent:!1,changeMonth:!1,changeYear:!1,yearRange:\"c-10:c+10\",showOtherMonths:!1,selectOtherMonths:!1,showWeek:!1,calculateWeek:this.iso8601Week,shortYearCutoff:\"+10\",minDate:null,maxDate:null,duration:\"fast\",beforeShowDay:null,beforeShow:null,onSelect:null,onChangeMonthYear:null,onClose:null,numberOfMonths:1,showCurrentAtPos:0,stepMonths:1,stepBigMonths:12,altField:\"\",altFormat:\"\",constrainInput:!0,showButtonPanel:!1,autoSize:!1,disabled:!1},e.extend(this._defaults,this.regional[\"\"]),this.regional.en=e.extend(!0,{},this.regional[\"\"]),this.regional[\"en-US\"]=e.extend(!0,{},this.regional.en),this.dpDiv=a(e(\"
    \"))}function a(t){var i=\"button, .ui-datepicker-prev, .ui-datepicker-next, .ui-datepicker-calendar td a\";return t.delegate(i,\"mouseout\",function(){e(this).removeClass(\"ui-state-hover\"),-1!==this.className.indexOf(\"ui-datepicker-prev\")&&e(this).removeClass(\"ui-datepicker-prev-hover\"),-1!==this.className.indexOf(\"ui-datepicker-next\")&&e(this).removeClass(\"ui-datepicker-next-hover\")}).delegate(i,\"mouseover\",o)}function o(){e.datepicker._isDisabledDatepicker(v.inline?v.dpDiv.parent()[0]:v.input[0])||(e(this).parents(\".ui-datepicker-calendar\").find(\"a\").removeClass(\"ui-state-hover\"),e(this).addClass(\"ui-state-hover\"),-1!==this.className.indexOf(\"ui-datepicker-prev\")&&e(this).addClass(\"ui-datepicker-prev-hover\"),-1!==this.className.indexOf(\"ui-datepicker-next\")&&e(this).addClass(\"ui-datepicker-next-hover\"))}function r(t,i){e.extend(t,i);for(var s in i)null==i[s]&&(t[s]=i[s]);return t}function h(e){return function(){var t=this.element.val();e.apply(this,arguments),this._refresh(),t!==this.element.val()&&this._trigger(\"change\")}}e.ui=e.ui||{},e.extend(e.ui,{version:\"1.11.4\",keyCode:{BACKSPACE:8,COMMA:188,DELETE:46,DOWN:40,END:35,ENTER:13,ESCAPE:27,HOME:36,LEFT:37,PAGE_DOWN:34,PAGE_UP:33,PERIOD:190,RIGHT:39,SPACE:32,TAB:9,UP:38}}),e.fn.extend({scrollParent:function(t){var i=this.css(\"position\"),s=\"absolute\"===i,n=t?/(auto|scroll|hidden)/:/(auto|scroll)/,a=this.parents().filter(function(){var t=e(this);return s&&\"static\"===t.css(\"position\")?!1:n.test(t.css(\"overflow\")+t.css(\"overflow-y\")+t.css(\"overflow-x\"))}).eq(0);return\"fixed\"!==i&&a.length?a:e(this[0].ownerDocument||document)},uniqueId:function(){var e=0;return function(){return this.each(function(){this.id||(this.id=\"ui-id-\"+ ++e)})}}(),removeUniqueId:function(){return this.each(function(){/^ui-id-\\d+$/.test(this.id)&&e(this).removeAttr(\"id\")})}}),e.extend(e.expr[\":\"],{data:e.expr.createPseudo?e.expr.createPseudo(function(t){return function(i){return!!e.data(i,t)}}):function(t,i,s){return!!e.data(t,s[3])},focusable:function(i){return t(i,!isNaN(e.attr(i,\"tabindex\")))},tabbable:function(i){var s=e.attr(i,\"tabindex\"),n=isNaN(s);return(n||s>=0)&&t(i,!n)}}),e(\"
    \").outerWidth(1).jquery||e.each([\"Width\",\"Height\"],function(t,i){function s(t,i,s,a){return e.each(n,function(){i-=parseFloat(e.css(t,\"padding\"+this))||0,s&&(i-=parseFloat(e.css(t,\"border\"+this+\"Width\"))||0),a&&(i-=parseFloat(e.css(t,\"margin\"+this))||0)}),i}var n=\"Width\"===i?[\"Left\",\"Right\"]:[\"Top\",\"Bottom\"],a=i.toLowerCase(),o={innerWidth:e.fn.innerWidth,innerHeight:e.fn.innerHeight,outerWidth:e.fn.outerWidth,outerHeight:e.fn.outerHeight};e.fn[\"inner\"+i]=function(t){return void 0===t?o[\"inner\"+i].call(this):this.each(function(){e(this).css(a,s(this,t)+\"px\")})},e.fn[\"outer\"+i]=function(t,n){return\"number\"!=typeof t?o[\"outer\"+i].call(this,t):this.each(function(){e(this).css(a,s(this,t,!0,n)+\"px\")})}}),e.fn.addBack||(e.fn.addBack=function(e){return this.add(null==e?this.prevObject:this.prevObject.filter(e))}),e(\"\").data(\"a-b\",\"a\").removeData(\"a-b\").data(\"a-b\")&&(e.fn.removeData=function(t){return function(i){return arguments.length?t.call(this,e.camelCase(i)):t.call(this)}}(e.fn.removeData)),e.ui.ie=!!/msie [\\w.]+/.exec(navigator.userAgent.toLowerCase()),e.fn.extend({focus:function(t){return function(i,s){return\"number\"==typeof i?this.each(function(){var t=this;setTimeout(function(){e(t).focus(),s&&s.call(t)},i)}):t.apply(this,arguments)}}(e.fn.focus),disableSelection:function(){var e=\"onselectstart\"in document.createElement(\"div\")?\"selectstart\":\"mousedown\";return function(){return this.bind(e+\".ui-disableSelection\",function(e){e.preventDefault()})}}(),enableSelection:function(){return this.unbind(\".ui-disableSelection\")},zIndex:function(t){if(void 0!==t)return this.css(\"zIndex\",t);if(this.length)for(var i,s,n=e(this[0]);n.length&&n[0]!==document;){if(i=n.css(\"position\"),(\"absolute\"===i||\"relative\"===i||\"fixed\"===i)&&(s=parseInt(n.css(\"zIndex\"),10),!isNaN(s)&&0!==s))return s;n=n.parent()}return 0}}),e.ui.plugin={add:function(t,i,s){var n,a=e.ui[t].prototype;for(n in s)a.plugins[n]=a.plugins[n]||[],a.plugins[n].push([i,s[n]])},call:function(e,t,i,s){var n,a=e.plugins[t];if(a&&(s||e.element[0].parentNode&&11!==e.element[0].parentNode.nodeType))for(n=0;a.length>n;n++)e.options[a[n][0]]&&a[n][1].apply(e.element,i)}};var l=0,u=Array.prototype.slice;e.cleanData=function(t){return function(i){var s,n,a;for(a=0;null!=(n=i[a]);a++)try{s=e._data(n,\"events\"),s&&s.remove&&e(n).triggerHandler(\"remove\")}catch(o){}t(i)}}(e.cleanData),e.widget=function(t,i,s){var n,a,o,r,h={},l=t.split(\".\")[0];return t=t.split(\".\")[1],n=l+\"-\"+t,s||(s=i,i=e.Widget),e.expr[\":\"][n.toLowerCase()]=function(t){return!!e.data(t,n)},e[l]=e[l]||{},a=e[l][t],o=e[l][t]=function(e,t){return this._createWidget?(arguments.length&&this._createWidget(e,t),void 0):new o(e,t)},e.extend(o,a,{version:s.version,_proto:e.extend({},s),_childConstructors:[]}),r=new i,r.options=e.widget.extend({},r.options),e.each(s,function(t,s){return e.isFunction(s)?(h[t]=function(){var e=function(){return i.prototype[t].apply(this,arguments)},n=function(e){return i.prototype[t].apply(this,e)};return function(){var t,i=this._super,a=this._superApply;return this._super=e,this._superApply=n,t=s.apply(this,arguments),this._super=i,this._superApply=a,t}}(),void 0):(h[t]=s,void 0)}),o.prototype=e.widget.extend(r,{widgetEventPrefix:a?r.widgetEventPrefix||t:t},h,{constructor:o,namespace:l,widgetName:t,widgetFullName:n}),a?(e.each(a._childConstructors,function(t,i){var s=i.prototype;e.widget(s.namespace+\".\"+s.widgetName,o,i._proto)}),delete 
a._childConstructors):i._childConstructors.push(o),e.widget.bridge(t,o),o},e.widget.extend=function(t){for(var i,s,n=u.call(arguments,1),a=0,o=n.length;o>a;a++)for(i in n[a])s=n[a][i],n[a].hasOwnProperty(i)&&void 0!==s&&(t[i]=e.isPlainObject(s)?e.isPlainObject(t[i])?e.widget.extend({},t[i],s):e.widget.extend({},s):s);return t},e.widget.bridge=function(t,i){var s=i.prototype.widgetFullName||t;e.fn[t]=function(n){var a=\"string\"==typeof n,o=u.call(arguments,1),r=this;return a?this.each(function(){var i,a=e.data(this,s);return\"instance\"===n?(r=a,!1):a?e.isFunction(a[n])&&\"_\"!==n.charAt(0)?(i=a[n].apply(a,o),i!==a&&void 0!==i?(r=i&&i.jquery?r.pushStack(i.get()):i,!1):void 0):e.error(\"no such method '\"+n+\"' for \"+t+\" widget instance\"):e.error(\"cannot call methods on \"+t+\" prior to initialization; \"+\"attempted to call method '\"+n+\"'\")}):(o.length&&(n=e.widget.extend.apply(null,[n].concat(o))),this.each(function(){var t=e.data(this,s);t?(t.option(n||{}),t._init&&t._init()):e.data(this,s,new i(n,this))})),r}},e.Widget=function(){},e.Widget._childConstructors=[],e.Widget.prototype={widgetName:\"widget\",widgetEventPrefix:\"\",defaultElement:\"
    \",options:{disabled:!1,create:null},_createWidget:function(t,i){i=e(i||this.defaultElement||this)[0],this.element=e(i),this.uuid=l++,this.eventNamespace=\".\"+this.widgetName+this.uuid,this.bindings=e(),this.hoverable=e(),this.focusable=e(),i!==this&&(e.data(i,this.widgetFullName,this),this._on(!0,this.element,{remove:function(e){e.target===i&&this.destroy()}}),this.document=e(i.style?i.ownerDocument:i.document||i),this.window=e(this.document[0].defaultView||this.document[0].parentWindow)),this.options=e.widget.extend({},this.options,this._getCreateOptions(),t),this._create(),this._trigger(\"create\",null,this._getCreateEventData()),this._init()},_getCreateOptions:e.noop,_getCreateEventData:e.noop,_create:e.noop,_init:e.noop,destroy:function(){this._destroy(),this.element.unbind(this.eventNamespace).removeData(this.widgetFullName).removeData(e.camelCase(this.widgetFullName)),this.widget().unbind(this.eventNamespace).removeAttr(\"aria-disabled\").removeClass(this.widgetFullName+\"-disabled \"+\"ui-state-disabled\"),this.bindings.unbind(this.eventNamespace),this.hoverable.removeClass(\"ui-state-hover\"),this.focusable.removeClass(\"ui-state-focus\")},_destroy:e.noop,widget:function(){return this.element},option:function(t,i){var s,n,a,o=t;if(0===arguments.length)return e.widget.extend({},this.options);if(\"string\"==typeof t)if(o={},s=t.split(\".\"),t=s.shift(),s.length){for(n=o[t]=e.widget.extend({},this.options[t]),a=0;s.length-1>a;a++)n[s[a]]=n[s[a]]||{},n=n[s[a]];if(t=s.pop(),1===arguments.length)return void 0===n[t]?null:n[t];n[t]=i}else{if(1===arguments.length)return void 0===this.options[t]?null:this.options[t];o[t]=i}return this._setOptions(o),this},_setOptions:function(e){var t;for(t in e)this._setOption(t,e[t]);return this},_setOption:function(e,t){return this.options[e]=t,\"disabled\"===e&&(this.widget().toggleClass(this.widgetFullName+\"-disabled\",!!t),t&&(this.hoverable.removeClass(\"ui-state-hover\"),this.focusable.removeClass(\"ui-state-focus\"))),this},enable:function(){return this._setOptions({disabled:!1})},disable:function(){return this._setOptions({disabled:!0})},_on:function(t,i,s){var n,a=this;\"boolean\"!=typeof t&&(s=i,i=t,t=!1),s?(i=n=e(i),this.bindings=this.bindings.add(i)):(s=i,i=this.element,n=this.widget()),e.each(s,function(s,o){function r(){return t||a.options.disabled!==!0&&!e(this).hasClass(\"ui-state-disabled\")?(\"string\"==typeof o?a[o]:o).apply(a,arguments):void 0}\"string\"!=typeof o&&(r.guid=o.guid=o.guid||r.guid||e.guid++);var h=s.match(/^([\\w:-]*)\\s*(.*)$/),l=h[1]+a.eventNamespace,u=h[2];u?n.delegate(u,l,r):i.bind(l,r)})},_off:function(t,i){i=(i||\"\").split(\" \").join(this.eventNamespace+\" \")+this.eventNamespace,t.unbind(i).undelegate(i),this.bindings=e(this.bindings.not(t).get()),this.focusable=e(this.focusable.not(t).get()),this.hoverable=e(this.hoverable.not(t).get())},_delay:function(e,t){function i(){return(\"string\"==typeof e?s[e]:e).apply(s,arguments)}var s=this;return setTimeout(i,t||0)},_hoverable:function(t){this.hoverable=this.hoverable.add(t),this._on(t,{mouseenter:function(t){e(t.currentTarget).addClass(\"ui-state-hover\")},mouseleave:function(t){e(t.currentTarget).removeClass(\"ui-state-hover\")}})},_focusable:function(t){this.focusable=this.focusable.add(t),this._on(t,{focusin:function(t){e(t.currentTarget).addClass(\"ui-state-focus\")},focusout:function(t){e(t.currentTarget).removeClass(\"ui-state-focus\")}})},_trigger:function(t,i,s){var 
n,a,o=this.options[t];if(s=s||{},i=e.Event(i),i.type=(t===this.widgetEventPrefix?t:this.widgetEventPrefix+t).toLowerCase(),i.target=this.element[0],a=i.originalEvent)for(n in a)n in i||(i[n]=a[n]);return this.element.trigger(i,s),!(e.isFunction(o)&&o.apply(this.element[0],[i].concat(s))===!1||i.isDefaultPrevented())}},e.each({show:\"fadeIn\",hide:\"fadeOut\"},function(t,i){e.Widget.prototype[\"_\"+t]=function(s,n,a){\"string\"==typeof n&&(n={effect:n});var o,r=n?n===!0||\"number\"==typeof n?i:n.effect||i:t;n=n||{},\"number\"==typeof n&&(n={duration:n}),o=!e.isEmptyObject(n),n.complete=a,n.delay&&s.delay(n.delay),o&&e.effects&&e.effects.effect[r]?s[t](n):r!==t&&s[r]?s[r](n.duration,n.easing,a):s.queue(function(i){e(this)[t](),a&&a.call(s[0]),i()})}}),e.widget;var d=!1;e(document).mouseup(function(){d=!1}),e.widget(\"ui.mouse\",{version:\"1.11.4\",options:{cancel:\"input,textarea,button,select,option\",distance:1,delay:0},_mouseInit:function(){var t=this;this.element.bind(\"mousedown.\"+this.widgetName,function(e){return t._mouseDown(e)}).bind(\"click.\"+this.widgetName,function(i){return!0===e.data(i.target,t.widgetName+\".preventClickEvent\")?(e.removeData(i.target,t.widgetName+\".preventClickEvent\"),i.stopImmediatePropagation(),!1):void 0}),this.started=!1},_mouseDestroy:function(){this.element.unbind(\".\"+this.widgetName),this._mouseMoveDelegate&&this.document.unbind(\"mousemove.\"+this.widgetName,this._mouseMoveDelegate).unbind(\"mouseup.\"+this.widgetName,this._mouseUpDelegate)},_mouseDown:function(t){if(!d){this._mouseMoved=!1,this._mouseStarted&&this._mouseUp(t),this._mouseDownEvent=t;var i=this,s=1===t.which,n=\"string\"==typeof this.options.cancel&&t.target.nodeName?e(t.target).closest(this.options.cancel).length:!1;return s&&!n&&this._mouseCapture(t)?(this.mouseDelayMet=!this.options.delay,this.mouseDelayMet||(this._mouseDelayTimer=setTimeout(function(){i.mouseDelayMet=!0},this.options.delay)),this._mouseDistanceMet(t)&&this._mouseDelayMet(t)&&(this._mouseStarted=this._mouseStart(t)!==!1,!this._mouseStarted)?(t.preventDefault(),!0):(!0===e.data(t.target,this.widgetName+\".preventClickEvent\")&&e.removeData(t.target,this.widgetName+\".preventClickEvent\"),this._mouseMoveDelegate=function(e){return i._mouseMove(e)},this._mouseUpDelegate=function(e){return i._mouseUp(e)},this.document.bind(\"mousemove.\"+this.widgetName,this._mouseMoveDelegate).bind(\"mouseup.\"+this.widgetName,this._mouseUpDelegate),t.preventDefault(),d=!0,!0)):!0}},_mouseMove:function(t){if(this._mouseMoved){if(e.ui.ie&&(!document.documentMode||9>document.documentMode)&&!t.button)return this._mouseUp(t);if(!t.which)return this._mouseUp(t)}return(t.which||t.button)&&(this._mouseMoved=!0),this._mouseStarted?(this._mouseDrag(t),t.preventDefault()):(this._mouseDistanceMet(t)&&this._mouseDelayMet(t)&&(this._mouseStarted=this._mouseStart(this._mouseDownEvent,t)!==!1,this._mouseStarted?this._mouseDrag(t):this._mouseUp(t)),!this._mouseStarted)},_mouseUp:function(t){return this.document.unbind(\"mousemove.\"+this.widgetName,this._mouseMoveDelegate).unbind(\"mouseup.\"+this.widgetName,this._mouseUpDelegate),this._mouseStarted&&(this._mouseStarted=!1,t.target===this._mouseDownEvent.target&&e.data(t.target,this.widgetName+\".preventClickEvent\",!0),this._mouseStop(t)),d=!1,!1},_mouseDistanceMet:function(e){return Math.max(Math.abs(this._mouseDownEvent.pageX-e.pageX),Math.abs(this._mouseDownEvent.pageY-e.pageY))>=this.options.distance},_mouseDelayMet:function(){return 
this.mouseDelayMet},_mouseStart:function(){},_mouseDrag:function(){},_mouseStop:function(){},_mouseCapture:function(){return!0}}),function(){function t(e,t,i){return[parseFloat(e[0])*(p.test(e[0])?t/100:1),parseFloat(e[1])*(p.test(e[1])?i/100:1)]}function i(t,i){return parseInt(e.css(t,i),10)||0}function s(t){var i=t[0];return 9===i.nodeType?{width:t.width(),height:t.height(),offset:{top:0,left:0}}:e.isWindow(i)?{width:t.width(),height:t.height(),offset:{top:t.scrollTop(),left:t.scrollLeft()}}:i.preventDefault?{width:0,height:0,offset:{top:i.pageY,left:i.pageX}}:{width:t.outerWidth(),height:t.outerHeight(),offset:t.offset()}}e.ui=e.ui||{};var n,a,o=Math.max,r=Math.abs,h=Math.round,l=/left|center|right/,u=/top|center|bottom/,d=/[\\+\\-]\\d+(\\.[\\d]+)?%?/,c=/^\\w+/,p=/%$/,f=e.fn.position;e.position={scrollbarWidth:function(){if(void 0!==n)return n;var t,i,s=e(\"
    \"),a=s.children()[0];return e(\"body\").append(s),t=a.offsetWidth,s.css(\"overflow\",\"scroll\"),i=a.offsetWidth,t===i&&(i=s[0].clientWidth),s.remove(),n=t-i},getScrollInfo:function(t){var i=t.isWindow||t.isDocument?\"\":t.element.css(\"overflow-x\"),s=t.isWindow||t.isDocument?\"\":t.element.css(\"overflow-y\"),n=\"scroll\"===i||\"auto\"===i&&t.widthi?\"left\":t>0?\"right\":\"center\",vertical:0>a?\"top\":s>0?\"bottom\":\"middle\"};d>m&&m>r(t+i)&&(h.horizontal=\"center\"),c>g&&g>r(s+a)&&(h.vertical=\"middle\"),h.important=o(r(t),r(i))>o(r(s),r(a))?\"horizontal\":\"vertical\",n.using.call(this,e,h)}),u.offset(e.extend(M,{using:l}))})},e.ui.position={fit:{left:function(e,t){var i,s=t.within,n=s.isWindow?s.scrollLeft:s.offset.left,a=s.width,r=e.left-t.collisionPosition.marginLeft,h=n-r,l=r+t.collisionWidth-a-n;t.collisionWidth>a?h>0&&0>=l?(i=e.left+h+t.collisionWidth-a-n,e.left+=h-i):e.left=l>0&&0>=h?n:h>l?n+a-t.collisionWidth:n:h>0?e.left+=h:l>0?e.left-=l:e.left=o(e.left-r,e.left)},top:function(e,t){var i,s=t.within,n=s.isWindow?s.scrollTop:s.offset.top,a=t.within.height,r=e.top-t.collisionPosition.marginTop,h=n-r,l=r+t.collisionHeight-a-n;t.collisionHeight>a?h>0&&0>=l?(i=e.top+h+t.collisionHeight-a-n,e.top+=h-i):e.top=l>0&&0>=h?n:h>l?n+a-t.collisionHeight:n:h>0?e.top+=h:l>0?e.top-=l:e.top=o(e.top-r,e.top)}},flip:{left:function(e,t){var i,s,n=t.within,a=n.offset.left+n.scrollLeft,o=n.width,h=n.isWindow?n.scrollLeft:n.offset.left,l=e.left-t.collisionPosition.marginLeft,u=l-h,d=l+t.collisionWidth-o-h,c=\"left\"===t.my[0]?-t.elemWidth:\"right\"===t.my[0]?t.elemWidth:0,p=\"left\"===t.at[0]?t.targetWidth:\"right\"===t.at[0]?-t.targetWidth:0,f=-2*t.offset[0];0>u?(i=e.left+c+p+f+t.collisionWidth-o-a,(0>i||r(u)>i)&&(e.left+=c+p+f)):d>0&&(s=e.left-t.collisionPosition.marginLeft+c+p+f-h,(s>0||d>r(s))&&(e.left+=c+p+f))},top:function(e,t){var i,s,n=t.within,a=n.offset.top+n.scrollTop,o=n.height,h=n.isWindow?n.scrollTop:n.offset.top,l=e.top-t.collisionPosition.marginTop,u=l-h,d=l+t.collisionHeight-o-h,c=\"top\"===t.my[1],p=c?-t.elemHeight:\"bottom\"===t.my[1]?t.elemHeight:0,f=\"top\"===t.at[1]?t.targetHeight:\"bottom\"===t.at[1]?-t.targetHeight:0,m=-2*t.offset[1];0>u?(s=e.top+p+f+m+t.collisionHeight-o-a,(0>s||r(u)>s)&&(e.top+=p+f+m)):d>0&&(i=e.top-t.collisionPosition.marginTop+p+f+m-h,(i>0||d>r(i))&&(e.top+=p+f+m))}},flipfit:{left:function(){e.ui.position.flip.left.apply(this,arguments),e.ui.position.fit.left.apply(this,arguments)},top:function(){e.ui.position.flip.top.apply(this,arguments),e.ui.position.fit.top.apply(this,arguments)}}},function(){var t,i,s,n,o,r=document.getElementsByTagName(\"body\")[0],h=document.createElement(\"div\");t=document.createElement(r?\"div\":\"body\"),s={visibility:\"hidden\",width:0,height:0,border:0,margin:0,background:\"none\"},r&&e.extend(s,{position:\"absolute\",left:\"-1000px\",top:\"-1000px\"});for(o in s)t.style[o]=s[o];t.appendChild(h),i=r||document.documentElement,i.insertBefore(t,i.firstChild),h.style.cssText=\"position: absolute; left: 10.7432222px;\",n=e(h).offset().left,a=n>10&&11>n,t.innerHTML=\"\",i.removeChild(t)}()}(),e.ui.position,e.widget(\"ui.accordion\",{version:\"1.11.4\",options:{active:0,animate:{},collapsible:!1,event:\"click\",header:\"> li > :first-child,> 
:not(li):even\",heightStyle:\"auto\",icons:{activeHeader:\"ui-icon-triangle-1-s\",header:\"ui-icon-triangle-1-e\"},activate:null,beforeActivate:null},hideProps:{borderTopWidth:\"hide\",borderBottomWidth:\"hide\",paddingTop:\"hide\",paddingBottom:\"hide\",height:\"hide\"},showProps:{borderTopWidth:\"show\",borderBottomWidth:\"show\",paddingTop:\"show\",paddingBottom:\"show\",height:\"show\"},_create:function(){var t=this.options;this.prevShow=this.prevHide=e(),this.element.addClass(\"ui-accordion ui-widget ui-helper-reset\").attr(\"role\",\"tablist\"),t.collapsible||t.active!==!1&&null!=t.active||(t.active=0),this._processPanels(),0>t.active&&(t.active+=this.headers.length),this._refresh()},_getCreateEventData:function(){return{header:this.active,panel:this.active.length?this.active.next():e()}},_createIcons:function(){var t=this.options.icons;t&&(e(\"\").addClass(\"ui-accordion-header-icon ui-icon \"+t.header).prependTo(this.headers),this.active.children(\".ui-accordion-header-icon\").removeClass(t.header).addClass(t.activeHeader),this.headers.addClass(\"ui-accordion-icons\"))},_destroyIcons:function(){this.headers.removeClass(\"ui-accordion-icons\").children(\".ui-accordion-header-icon\").remove()},_destroy:function(){var e;this.element.removeClass(\"ui-accordion ui-widget ui-helper-reset\").removeAttr(\"role\"),this.headers.removeClass(\"ui-accordion-header ui-accordion-header-active ui-state-default ui-corner-all ui-state-active ui-state-disabled ui-corner-top\").removeAttr(\"role\").removeAttr(\"aria-expanded\").removeAttr(\"aria-selected\").removeAttr(\"aria-controls\").removeAttr(\"tabIndex\").removeUniqueId(),this._destroyIcons(),e=this.headers.next().removeClass(\"ui-helper-reset ui-widget-content ui-corner-bottom ui-accordion-content ui-accordion-content-active ui-state-disabled\").css(\"display\",\"\").removeAttr(\"role\").removeAttr(\"aria-hidden\").removeAttr(\"aria-labelledby\").removeUniqueId(),\"content\"!==this.options.heightStyle&&e.css(\"height\",\"\")},_setOption:function(e,t){return\"active\"===e?(this._activate(t),void 0):(\"event\"===e&&(this.options.event&&this._off(this.headers,this.options.event),this._setupEvents(t)),this._super(e,t),\"collapsible\"!==e||t||this.options.active!==!1||this._activate(0),\"icons\"===e&&(this._destroyIcons(),t&&this._createIcons()),\"disabled\"===e&&(this.element.toggleClass(\"ui-state-disabled\",!!t).attr(\"aria-disabled\",t),this.headers.add(this.headers.next()).toggleClass(\"ui-state-disabled\",!!t)),void 0)},_keydown:function(t){if(!t.altKey&&!t.ctrlKey){var i=e.ui.keyCode,s=this.headers.length,n=this.headers.index(t.target),a=!1;switch(t.keyCode){case i.RIGHT:case i.DOWN:a=this.headers[(n+1)%s];break;case i.LEFT:case i.UP:a=this.headers[(n-1+s)%s];break;case i.SPACE:case i.ENTER:this._eventHandler(t);break;case i.HOME:a=this.headers[0];break;case i.END:a=this.headers[s-1]}a&&(e(t.target).attr(\"tabIndex\",-1),e(a).attr(\"tabIndex\",0),a.focus(),t.preventDefault())}},_panelKeyDown:function(t){t.keyCode===e.ui.keyCode.UP&&t.ctrlKey&&e(t.currentTarget).prev().focus()},refresh:function(){var 
t=this.options;this._processPanels(),t.active===!1&&t.collapsible===!0||!this.headers.length?(t.active=!1,this.active=e()):t.active===!1?this._activate(0):this.active.length&&!e.contains(this.element[0],this.active[0])?this.headers.length===this.headers.find(\".ui-state-disabled\").length?(t.active=!1,this.active=e()):this._activate(Math.max(0,t.active-1)):t.active=this.headers.index(this.active),this._destroyIcons(),this._refresh()},_processPanels:function(){var e=this.headers,t=this.panels;this.headers=this.element.find(this.options.header).addClass(\"ui-accordion-header ui-state-default ui-corner-all\"),this.panels=this.headers.next().addClass(\"ui-accordion-content ui-helper-reset ui-widget-content ui-corner-bottom\").filter(\":not(.ui-accordion-content-active)\").hide(),t&&(this._off(e.not(this.headers)),this._off(t.not(this.panels)))},_refresh:function(){var t,i=this.options,s=i.heightStyle,n=this.element.parent();this.active=this._findActive(i.active).addClass(\"ui-accordion-header-active ui-state-active ui-corner-top\").removeClass(\"ui-corner-all\"),this.active.next().addClass(\"ui-accordion-content-active\").show(),this.headers.attr(\"role\",\"tab\").each(function(){var t=e(this),i=t.uniqueId().attr(\"id\"),s=t.next(),n=s.uniqueId().attr(\"id\");t.attr(\"aria-controls\",n),s.attr(\"aria-labelledby\",i)}).next().attr(\"role\",\"tabpanel\"),this.headers.not(this.active).attr({\"aria-selected\":\"false\",\"aria-expanded\":\"false\",tabIndex:-1}).next().attr({\"aria-hidden\":\"true\"}).hide(),this.active.length?this.active.attr({\"aria-selected\":\"true\",\"aria-expanded\":\"true\",tabIndex:0}).next().attr({\"aria-hidden\":\"false\"}):this.headers.eq(0).attr(\"tabIndex\",0),this._createIcons(),this._setupEvents(i.event),\"fill\"===s?(t=n.height(),this.element.siblings(\":visible\").each(function(){var i=e(this),s=i.css(\"position\");\"absolute\"!==s&&\"fixed\"!==s&&(t-=i.outerHeight(!0))}),this.headers.each(function(){t-=e(this).outerHeight(!0)}),this.headers.next().each(function(){e(this).height(Math.max(0,t-e(this).innerHeight()+e(this).height()))}).css(\"overflow\",\"auto\")):\"auto\"===s&&(t=0,this.headers.next().each(function(){t=Math.max(t,e(this).css(\"height\",\"\").height())}).height(t))},_activate:function(t){var i=this._findActive(t)[0];i!==this.active[0]&&(i=i||this.active[0],this._eventHandler({target:i,currentTarget:i,preventDefault:e.noop}))},_findActive:function(t){return\"number\"==typeof t?this.headers.eq(t):e()},_setupEvents:function(t){var i={keydown:\"_keydown\"};t&&e.each(t.split(\" \"),function(e,t){i[t]=\"_eventHandler\"}),this._off(this.headers.add(this.headers.next())),this._on(this.headers,i),this._on(this.headers.next(),{keydown:\"_panelKeyDown\"}),this._hoverable(this.headers),this._focusable(this.headers)},_eventHandler:function(t){var i=this.options,s=this.active,n=e(t.currentTarget),a=n[0]===s[0],o=a&&i.collapsible,r=o?e():n.next(),h=s.next(),l={oldHeader:s,oldPanel:h,newHeader:o?e():n,newPanel:r};t.preventDefault(),a&&!i.collapsible||this._trigger(\"beforeActivate\",t,l)===!1||(i.active=o?!1:this.headers.index(n),this.active=a?e():n,this._toggle(l),s.removeClass(\"ui-accordion-header-active ui-state-active\"),i.icons&&s.children(\".ui-accordion-header-icon\").removeClass(i.icons.activeHeader).addClass(i.icons.header),a||(n.removeClass(\"ui-corner-all\").addClass(\"ui-accordion-header-active ui-state-active 
ui-corner-top\"),i.icons&&n.children(\".ui-accordion-header-icon\").removeClass(i.icons.header).addClass(i.icons.activeHeader),n.next().addClass(\"ui-accordion-content-active\")))},_toggle:function(t){var i=t.newPanel,s=this.prevShow.length?this.prevShow:t.oldPanel;this.prevShow.add(this.prevHide).stop(!0,!0),this.prevShow=i,this.prevHide=s,this.options.animate?this._animate(i,s,t):(s.hide(),i.show(),this._toggleComplete(t)),s.attr({\"aria-hidden\":\"true\"}),s.prev().attr({\"aria-selected\":\"false\",\"aria-expanded\":\"false\"}),i.length&&s.length?s.prev().attr({tabIndex:-1,\"aria-expanded\":\"false\"}):i.length&&this.headers.filter(function(){return 0===parseInt(e(this).attr(\"tabIndex\"),10)}).attr(\"tabIndex\",-1),i.attr(\"aria-hidden\",\"false\").prev().attr({\"aria-selected\":\"true\",\"aria-expanded\":\"true\",tabIndex:0})},_animate:function(e,t,i){var s,n,a,o=this,r=0,h=e.css(\"box-sizing\"),l=e.length&&(!t.length||e.index()\",delay:300,options:{icons:{submenu:\"ui-icon-carat-1-e\"},items:\"> *\",menus:\"ul\",position:{my:\"left-1 top\",at:\"right top\"},role:\"menu\",blur:null,focus:null,select:null},_create:function(){this.activeMenu=this.element,this.mouseHandled=!1,this.element.uniqueId().addClass(\"ui-menu ui-widget ui-widget-content\").toggleClass(\"ui-menu-icons\",!!this.element.find(\".ui-icon\").length).attr({role:this.options.role,tabIndex:0}),this.options.disabled&&this.element.addClass(\"ui-state-disabled\").attr(\"aria-disabled\",\"true\"),this._on({\"mousedown .ui-menu-item\":function(e){e.preventDefault()},\"click .ui-menu-item\":function(t){var i=e(t.target);!this.mouseHandled&&i.not(\".ui-state-disabled\").length&&(this.select(t),t.isPropagationStopped()||(this.mouseHandled=!0),i.has(\".ui-menu\").length?this.expand(t):!this.element.is(\":focus\")&&e(this.document[0].activeElement).closest(\".ui-menu\").length&&(this.element.trigger(\"focus\",[!0]),this.active&&1===this.active.parents(\".ui-menu\").length&&clearTimeout(this.timer)))},\"mouseenter .ui-menu-item\":function(t){if(!this.previousFilter){var i=e(t.currentTarget);\ni.siblings(\".ui-state-active\").removeClass(\"ui-state-active\"),this.focus(t,i)}},mouseleave:\"collapseAll\",\"mouseleave .ui-menu\":\"collapseAll\",focus:function(e,t){var i=this.active||this.element.find(this.options.items).eq(0);t||this.focus(e,i)},blur:function(t){this._delay(function(){e.contains(this.element[0],this.document[0].activeElement)||this.collapseAll(t)})},keydown:\"_keydown\"}),this.refresh(),this._on(this.document,{click:function(e){this._closeOnDocumentClick(e)&&this.collapseAll(e),this.mouseHandled=!1}})},_destroy:function(){this.element.removeAttr(\"aria-activedescendant\").find(\".ui-menu\").addBack().removeClass(\"ui-menu ui-widget ui-widget-content ui-menu-icons ui-front\").removeAttr(\"role\").removeAttr(\"tabIndex\").removeAttr(\"aria-labelledby\").removeAttr(\"aria-expanded\").removeAttr(\"aria-hidden\").removeAttr(\"aria-disabled\").removeUniqueId().show(),this.element.find(\".ui-menu-item\").removeClass(\"ui-menu-item\").removeAttr(\"role\").removeAttr(\"aria-disabled\").removeUniqueId().removeClass(\"ui-state-hover\").removeAttr(\"tabIndex\").removeAttr(\"role\").removeAttr(\"aria-haspopup\").children().each(function(){var t=e(this);t.data(\"ui-menu-submenu-carat\")&&t.remove()}),this.element.find(\".ui-menu-divider\").removeClass(\"ui-menu-divider ui-widget-content\")},_keydown:function(t){var i,s,n,a,o=!0;switch(t.keyCode){case e.ui.keyCode.PAGE_UP:this.previousPage(t);break;case 
e.ui.keyCode.PAGE_DOWN:this.nextPage(t);break;case e.ui.keyCode.HOME:this._move(\"first\",\"first\",t);break;case e.ui.keyCode.END:this._move(\"last\",\"last\",t);break;case e.ui.keyCode.UP:this.previous(t);break;case e.ui.keyCode.DOWN:this.next(t);break;case e.ui.keyCode.LEFT:this.collapse(t);break;case e.ui.keyCode.RIGHT:this.active&&!this.active.is(\".ui-state-disabled\")&&this.expand(t);break;case e.ui.keyCode.ENTER:case e.ui.keyCode.SPACE:this._activate(t);break;case e.ui.keyCode.ESCAPE:this.collapse(t);break;default:o=!1,s=this.previousFilter||\"\",n=String.fromCharCode(t.keyCode),a=!1,clearTimeout(this.filterTimer),n===s?a=!0:n=s+n,i=this._filterMenuItems(n),i=a&&-1!==i.index(this.active.next())?this.active.nextAll(\".ui-menu-item\"):i,i.length||(n=String.fromCharCode(t.keyCode),i=this._filterMenuItems(n)),i.length?(this.focus(t,i),this.previousFilter=n,this.filterTimer=this._delay(function(){delete this.previousFilter},1e3)):delete this.previousFilter}o&&t.preventDefault()},_activate:function(e){this.active.is(\".ui-state-disabled\")||(this.active.is(\"[aria-haspopup='true']\")?this.expand(e):this.select(e))},refresh:function(){var t,i,s=this,n=this.options.icons.submenu,a=this.element.find(this.options.menus);this.element.toggleClass(\"ui-menu-icons\",!!this.element.find(\".ui-icon\").length),a.filter(\":not(.ui-menu)\").addClass(\"ui-menu ui-widget ui-widget-content ui-front\").hide().attr({role:this.options.role,\"aria-hidden\":\"true\",\"aria-expanded\":\"false\"}).each(function(){var t=e(this),i=t.parent(),s=e(\"<span>\").addClass(\"ui-menu-icon ui-icon \"+n).data(\"ui-menu-submenu-carat\",!0);i.attr(\"aria-haspopup\",\"true\").prepend(s),t.attr(\"aria-labelledby\",i.attr(\"id\"))}),t=a.add(this.element),i=t.find(this.options.items),i.not(\".ui-menu-item\").each(function(){var t=e(this);s._isDivider(t)&&t.addClass(\"ui-widget-content ui-menu-divider\")}),i.not(\".ui-menu-item, .ui-menu-divider\").addClass(\"ui-menu-item\").uniqueId().attr({tabIndex:-1,role:this._itemRole()}),i.filter(\".ui-state-disabled\").attr(\"aria-disabled\",\"true\"),this.active&&!e.contains(this.element[0],this.active[0])&&this.blur()},_itemRole:function(){return{menu:\"menuitem\",listbox:\"option\"}[this.options.role]},_setOption:function(e,t){\"icons\"===e&&this.element.find(\".ui-menu-icon\").removeClass(this.options.icons.submenu).addClass(t.submenu),\"disabled\"===e&&this.element.toggleClass(\"ui-state-disabled\",!!t).attr(\"aria-disabled\",t),this._super(e,t)},focus:function(e,t){var i,s;this.blur(e,e&&\"focus\"===e.type),this._scrollIntoView(t),this.active=t.first(),s=this.active.addClass(\"ui-state-focus\").removeClass(\"ui-state-active\"),this.options.role&&this.element.attr(\"aria-activedescendant\",s.attr(\"id\")),this.active.parent().closest(\".ui-menu-item\").addClass(\"ui-state-active\"),e&&\"keydown\"===e.type?this._close():this.timer=this._delay(function(){this._close()},this.delay),i=t.children(\".ui-menu\"),i.length&&e&&/^mouse/.test(e.type)&&this._startOpening(i),this.activeMenu=t.parent(),this._trigger(\"focus\",e,{item:t})},_scrollIntoView:function(t){var 
i,s,n,a,o,r;this._hasScroll()&&(i=parseFloat(e.css(this.activeMenu[0],\"borderTopWidth\"))||0,s=parseFloat(e.css(this.activeMenu[0],\"paddingTop\"))||0,n=t.offset().top-this.activeMenu.offset().top-i-s,a=this.activeMenu.scrollTop(),o=this.activeMenu.height(),r=t.outerHeight(),0>n?this.activeMenu.scrollTop(a+n):n+r>o&&this.activeMenu.scrollTop(a+n-o+r))},blur:function(e,t){t||clearTimeout(this.timer),this.active&&(this.active.removeClass(\"ui-state-focus\"),this.active=null,this._trigger(\"blur\",e,{item:this.active}))},_startOpening:function(e){clearTimeout(this.timer),\"true\"===e.attr(\"aria-hidden\")&&(this.timer=this._delay(function(){this._close(),this._open(e)},this.delay))},_open:function(t){var i=e.extend({of:this.active},this.options.position);clearTimeout(this.timer),this.element.find(\".ui-menu\").not(t.parents(\".ui-menu\")).hide().attr(\"aria-hidden\",\"true\"),t.show().removeAttr(\"aria-hidden\").attr(\"aria-expanded\",\"true\").position(i)},collapseAll:function(t,i){clearTimeout(this.timer),this.timer=this._delay(function(){var s=i?this.element:e(t&&t.target).closest(this.element.find(\".ui-menu\"));s.length||(s=this.element),this._close(s),this.blur(t),this.activeMenu=s},this.delay)},_close:function(e){e||(e=this.active?this.active.parent():this.element),e.find(\".ui-menu\").hide().attr(\"aria-hidden\",\"true\").attr(\"aria-expanded\",\"false\").end().find(\".ui-state-active\").not(\".ui-state-focus\").removeClass(\"ui-state-active\")},_closeOnDocumentClick:function(t){return!e(t.target).closest(\".ui-menu\").length},_isDivider:function(e){return!/[^\\-\\u2014\\u2013\\s]/.test(e.text())},collapse:function(e){var t=this.active&&this.active.parent().closest(\".ui-menu-item\",this.element);t&&t.length&&(this._close(),this.focus(e,t))},expand:function(e){var t=this.active&&this.active.children(\".ui-menu \").find(this.options.items).first();t&&t.length&&(this._open(t.parent()),this._delay(function(){this.focus(e,t)}))},next:function(e){this._move(\"next\",\"first\",e)},previous:function(e){this._move(\"prev\",\"last\",e)},isFirstItem:function(){return this.active&&!this.active.prevAll(\".ui-menu-item\").length},isLastItem:function(){return this.active&&!this.active.nextAll(\".ui-menu-item\").length},_move:function(e,t,i){var s;this.active&&(s=\"first\"===e||\"last\"===e?this.active[\"first\"===e?\"prevAll\":\"nextAll\"](\".ui-menu-item\").eq(-1):this.active[e+\"All\"](\".ui-menu-item\").eq(0)),s&&s.length&&this.active||(s=this.activeMenu.find(this.options.items)[t]()),this.focus(i,s)},nextPage:function(t){var i,s,n;return this.active?(this.isLastItem()||(this._hasScroll()?(s=this.active.offset().top,n=this.element.height(),this.active.nextAll(\".ui-menu-item\").each(function(){return i=e(this),0>i.offset().top-s-n}),this.focus(t,i)):this.focus(t,this.activeMenu.find(this.options.items)[this.active?\"last\":\"first\"]())),void 0):(this.next(t),void 0)},previousPage:function(t){var i,s,n;return this.active?(this.isFirstItem()||(this._hasScroll()?(s=this.active.offset().top,n=this.element.height(),this.active.prevAll(\".ui-menu-item\").each(function(){return i=e(this),i.offset().top-s+n>0}),this.focus(t,i)):this.focus(t,this.activeMenu.find(this.options.items).first())),void 0):(this.next(t),void 0)},_hasScroll:function(){return this.element.outerHeight()\",options:{appendTo:null,autoFocus:!1,delay:300,minLength:1,position:{my:\"left top\",at:\"left 
bottom\",collision:\"none\"},source:null,change:null,close:null,focus:null,open:null,response:null,search:null,select:null},requestIndex:0,pending:0,_create:function(){var t,i,s,n=this.element[0].nodeName.toLowerCase(),a=\"textarea\"===n,o=\"input\"===n;this.isMultiLine=a?!0:o?!1:this.element.prop(\"isContentEditable\"),this.valueMethod=this.element[a||o?\"val\":\"text\"],this.isNewMenu=!0,this.element.addClass(\"ui-autocomplete-input\").attr(\"autocomplete\",\"off\"),this._on(this.element,{keydown:function(n){if(this.element.prop(\"readOnly\"))return t=!0,s=!0,i=!0,void 0;t=!1,s=!1,i=!1;var a=e.ui.keyCode;switch(n.keyCode){case a.PAGE_UP:t=!0,this._move(\"previousPage\",n);break;case a.PAGE_DOWN:t=!0,this._move(\"nextPage\",n);break;case a.UP:t=!0,this._keyEvent(\"previous\",n);break;case a.DOWN:t=!0,this._keyEvent(\"next\",n);break;case a.ENTER:this.menu.active&&(t=!0,n.preventDefault(),this.menu.select(n));break;case a.TAB:this.menu.active&&this.menu.select(n);break;case a.ESCAPE:this.menu.element.is(\":visible\")&&(this.isMultiLine||this._value(this.term),this.close(n),n.preventDefault());break;default:i=!0,this._searchTimeout(n)}},keypress:function(s){if(t)return t=!1,(!this.isMultiLine||this.menu.element.is(\":visible\"))&&s.preventDefault(),void 0;if(!i){var n=e.ui.keyCode;switch(s.keyCode){case n.PAGE_UP:this._move(\"previousPage\",s);break;case n.PAGE_DOWN:this._move(\"nextPage\",s);break;case n.UP:this._keyEvent(\"previous\",s);break;case n.DOWN:this._keyEvent(\"next\",s)}}},input:function(e){return s?(s=!1,e.preventDefault(),void 0):(this._searchTimeout(e),void 0)},focus:function(){this.selectedItem=null,this.previous=this._value()},blur:function(e){return this.cancelBlur?(delete this.cancelBlur,void 0):(clearTimeout(this.searching),this.close(e),this._change(e),void 0)}}),this._initSource(),this.menu=e(\"