# -----------------------------------------------------------------------------
# cpp.py
#
# Author:  David Beazley (http://www.dabeaz.com)
# Copyright (C) 2007
# All rights reserved
#
# This module implements an ANSI-C style lexical preprocessor for PLY. 
# -----------------------------------------------------------------------------
from __future__ import generators

# -----------------------------------------------------------------------------
# Default preprocessor lexer definitions.   These tokens are enough to get
# a basic preprocessor working.   Other modules may import these if they want
# -----------------------------------------------------------------------------

tokens = (
   'CPP_ID','CPP_INTEGER', 'CPP_FLOAT', 'CPP_STRING', 'CPP_CHAR', 'CPP_WS', 'CPP_COMMENT', 'CPP_POUND','CPP_DPOUND'
)

literals = "+-*/%|&~^<>=!?()[]{}.,;:\\\'\""

# Whitespace
def t_CPP_WS(t):
    r'\s+'
    t.lexer.lineno += t.value.count("\n")
    return t

t_CPP_POUND = r'\#'
t_CPP_DPOUND = r'\#\#'

# Identifier
t_CPP_ID = r'[A-Za-z_][\w_]*'

# Integer literal
def CPP_INTEGER(t):
    r'(((((0x)|(0X))[0-9a-fA-F]+)|(\d+))([uU]|[lL]|[uU][lL]|[lL][uU])?)'
    return t

t_CPP_INTEGER = CPP_INTEGER

# Floating literal
t_CPP_FLOAT = r'((\d+)(\.\d+)(e(\+|-)?(\d+))? | (\d+)e(\+|-)?(\d+))([lL]|[fF])?'

# String literal
def t_CPP_STRING(t):
    r'\"([^\\\n]|(\\(.|\n)))*?\"'
    t.lexer.lineno += t.value.count("\n")
    return t

# Character constant 'c' or L'c'
def t_CPP_CHAR(t):
    r'(L)?\'([^\\\n]|(\\(.|\n)))*?\''
    t.lexer.lineno += t.value.count("\n")
    return t

# Comment
def t_CPP_COMMENT(t):
    r'(/\*(.|\n)*?\*/)|(//.*?\n)'
    t.lexer.lineno += t.value.count("\n")
    return t
    
def t_error(t):
    t.type = t.value[0]
    t.value = t.value[0]
    t.lexer.skip(1)
    return t

import re
import copy
import time
import os.path

# -----------------------------------------------------------------------------
# trigraph()
# 
# Given an input string, this function replaces all trigraph sequences. 
# The following mapping is used:
#
#     ??=    #
#     ??/    \
#     ??'    ^
#     ??(    [
#     ??)    ]
#     ??!    |
#     ??<    {
#     ??>    }
#     ??-    ~
# -----------------------------------------------------------------------------

_trigraph_pat = re.compile(r'''\?\?[=/\'\(\)\!<>\-]''')
_trigraph_rep = {
    '=':'#',
    '/':'\\',
    "'":'^',
    '(':'[',
    ')':']',
    '!':'|',
    '<':'{',
    '>':'}',
    '-':'~'
}

def trigraph(input):
    return _trigraph_pat.sub(lambda g: _trigraph_rep[g.group()[-1]],input)
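
# Illustrative example: trigraph() rewrites each "??X" sequence listed in the
# table above and leaves everything else untouched, e.g.
#
#     >>> trigraph("??=define ARR(x) x??(0??)")
#     '#define ARR(x) x[0]'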

# ------------------------------------------------------------------
# Macro object
#
# This object holds information about preprocessor macros
#
#    .name      - Macro name (string)
#    .value     - Macro value (a list of tokens)
#    .arglist   - List of argument names
#    .variadic  - Boolean indicating whether or not variadic macro
#    .vararg    - Name of the variadic parameter
#
# When a macro is created, the macro replacement token sequence is
# pre-scanned and used to create patch lists that are later used
# during macro expansion
# ------------------------------------------------------------------

class Macro(object):
    def __init__(self,name,value,arglist=None,variadic=False):
        self.name = name
        self.value = value
        self.arglist = arglist
        self.variadic = variadic
        if variadic:
            self.vararg = arglist[-1]
        self.source = None
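
# For illustration: with the tokenizer above, "#define SQR(x) ((x)*(x))"
# yields a Macro with name "SQR", arglist ["x"], variadic False, and value
# holding the tokens of "((x)*(x))"; "#define EPRINTF(...) fprintf(stderr,
# __VA_ARGS__)" yields a variadic Macro whose vararg is "__VA_ARGS__".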

# ------------------------------------------------------------------
# Preprocessor object
#
# Object representing a preprocessor.  Contains macro definitions,
# include directories, and other information
# ------------------------------------------------------------------

class Preprocessor(object):
    def __init__(self,lexer=None):
        if lexer is None:
            lexer = lex.lexer
        self.lexer = lexer
        self.macros = { }
        self.path = []
        self.temp_path = []

        # Probe the lexer for selected tokens
        self.lexprobe()

        tm = time.localtime()
        self.define("__DATE__ \"%s\"" % time.strftime("%b %d %Y",tm))
        self.define("__TIME__ \"%s\"" % time.strftime("%H:%M:%S",tm))
        self.parser = None

    # -----------------------------------------------------------------------------
    # tokenize()
    #
    # Utility function. Given a string of text, tokenize into a list of tokens
    # -----------------------------------------------------------------------------

    def tokenize(self,text):
        tokens = []
        self.lexer.input(text)
        while True:
            tok = self.lexer.token()
            if not tok: break
            tokens.append(tok)
        return tokens
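
    # Illustrative example (assuming the default lexer defined at the top of
    # this module): self.tokenize("x + 1") returns five tokens -- CPP_ID 'x',
    # CPP_WS ' ', the literal '+', CPP_WS ' ', and CPP_INTEGER '1'.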

    # ---------------------------------------------------------------------
    # error()
    #
    # Report a preprocessor error/warning of some kind
    # ----------------------------------------------------------------------

    def error(self,file,line,msg):
        print("%s:%d %s" % (file,line,msg))

    # ----------------------------------------------------------------------
    # lexprobe()
    #
    # This method probes the preprocessor lexer object to discover
    # the token types of symbols that are important to the preprocessor.
    # If this works right, the preprocessor will simply "work"
    # with any suitable lexer regardless of how tokens have been named.
    # ----------------------------------------------------------------------

    def lexprobe(self):

        # Determine the token type for identifiers
        self.lexer.input("identifier")
        tok = self.lexer.token()
        if not tok or tok.value != "identifier":
            print("Couldn't determine identifier type")
        else:
            self.t_ID = tok.type

        # Determine the token type for integers
        self.lexer.input("12345")
        tok = self.lexer.token()
        if not tok or int(tok.value) != 12345:
            print("Couldn't determine integer type")
        else:
            self.t_INTEGER = tok.type
            self.t_INTEGER_TYPE = type(tok.value)

        # Determine the token type for strings enclosed in double quotes
        self.lexer.input("\"filename\"")
        tok = self.lexer.token()
        if not tok or tok.value != "\"filename\"":
            print("Couldn't determine string type")
        else:
            self.t_STRING = tok.type

        # Determine the token type for whitespace--if any
        self.lexer.input("  ")
        tok = self.lexer.token()
        if not tok or tok.value != "  ":
            self.t_SPACE = None
        else:
            self.t_SPACE = tok.type

        # Determine the token type for newlines
        self.lexer.input("\n")
        tok = self.lexer.token()
        if not tok or tok.value != "\n":
            self.t_NEWLINE = None
            print("Couldn't determine token for newlines")
        else:
            self.t_NEWLINE = tok.type

        self.t_WS = (self.t_SPACE, self.t_NEWLINE)

        # Check for other characters used by the preprocessor
        chars = [ '<','>','#','##','\\','(',')',',','.']
        for c in chars:
            self.lexer.input(c)
            tok = self.lexer.token()
            if not tok or tok.value != c:
                print("Unable to lex '%s' required for preprocessor" % c)

    # ----------------------------------------------------------------------
    # add_path()
    #
    # Adds a search path to the preprocessor.  
    # ----------------------------------------------------------------------

    def add_path(self,path):
        self.path.append(path)

    # ----------------------------------------------------------------------
    # group_lines()
    #
    # Given an input string, this function splits it into lines.  Trailing whitespace
    # is removed.   Any line ending with \ is grouped with the next line.  This
    # function forms the lowest level of the preprocessor---grouping text into
    # a line-by-line format.
    # ----------------------------------------------------------------------

    def group_lines(self,input):
        lex = self.lexer.clone()
        lines = [x.rstrip() for x in input.splitlines()]
        for i in xrange(len(lines)):
            j = i+1
            while lines[i].endswith('\\') and (j < len(lines)):
                lines[i] = lines[i][:-1]+lines[j]
                lines[j] = ""
                j += 1

        input = "\n".join(lines)
        lex.input(input)
        lex.lineno = 1

        current_line = []
        while True:
            tok = lex.token()
            if not tok:
                break
            current_line.append(tok)
            if tok.type in self.t_WS and '\n' in tok.value:
                yield current_line
                current_line = []

        if current_line:
            yield current_line
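
    # Illustrative example: for the input "a \\\nb\nc" the backslash joins
    # the first two physical lines, so the generator yields two token groups
    # -- the tokens of "a b" (ending with a newline token) and the tokens
    # of "c".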

    # ----------------------------------------------------------------------
    # tokenstrip()
    # 
    # Remove leading/trailing whitespace tokens from a token list
    # ----------------------------------------------------------------------

    def tokenstrip(self,tokens):
        i = 0
        while i < len(tokens) and tokens[i].type in self.t_WS:
            i += 1
        del tokens[:i]
        i = len(tokens)-1
        while i >= 0 and tokens[i].type in self.t_WS:
            i -= 1
        del tokens[i+1:]
        return tokens
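
    # Illustrative example: self.tokenstrip(self.tokenize("  x + 1  ")) drops
    # the leading and trailing CPP_WS tokens but keeps the interior
    # whitespace, leaving the tokens of "x + 1".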


    # ----------------------------------------------------------------------
    # collect_args()
    #
    # Collects comma separated arguments from a list of tokens.   The arguments
    # must be enclosed in parentheses.  Returns a tuple (tokencount,args,positions)
    # where tokencount is the number of tokens consumed, args is a list of arguments,
    # and positions is a list of integers containing the starting index of each
    # argument.  Each argument is represented by a list of tokens.
    #
    # When collecting arguments, leading and trailing whitespace is removed
    # from each argument.  
    #
    # This function properly handles nested parentheses and commas---these do not
    # define new arguments.
    # ----------------------------------------------------------------------

    def collect_args(self,tokenlist):
        args = []
        positions = []
        current_arg = []
        nesting = 1
        tokenlen = len(tokenlist)
    
        # Search for the opening '('.
        i = 0
        while (i < tokenlen) and (tokenlist[i].type in self.t_WS):
            i += 1

        if (i < tokenlen) and (tokenlist[i].value == '('):
            positions.append(i+1)
        else:
            self.error(self.source,tokenlist[0].lineno,"Missing '(' in macro arguments")
            return 0, [], []

        i += 1

        while i < tokenlen:
            t = tokenlist[i]
            if t.value == '(':
                current_arg.append(t)
                nesting += 1
            elif t.value == ')':
                nesting -= 1
                if nesting == 0:
                    if current_arg:
                        args.append(self.tokenstrip(current_arg))
                        positions.append(i)
                    return i+1,args,positions
                current_arg.append(t)
            elif t.value == ',' and nesting == 1:
                args.append(self.tokenstrip(current_arg))
                positions.append(i+1)
                current_arg = []
            else:
                current_arg.append(t)
            i += 1
    
        # Missing end argument
        self.error(self.source,tokenlist[-1].lineno,"Missing ')' in macro arguments")
        return 0, [],[]
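
    # Illustrative example: applied to the tokens of "(a, f(b, c), d) rest",
    # collect_args() returns three arguments -- the tokens of "a", of
    # "f(b, c)", and of "d" -- since the comma nested inside f(...) does not
    # start a new argument.  The returned token count points just past the
    # closing ')'.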

    # ----------------------------------------------------------------------
    # macro_prescan()
    #
    # Examine the macro value (token sequence) and identify patch points
    # This is used to speed up macro expansion later on---we'll know
    # right away where to apply patches to the value to form the expansion
    # ----------------------------------------------------------------------
    
    def macro_prescan(self,macro):
        macro.patch     = []             # Standard macro arguments 
        macro.str_patch = []             # String conversion expansion
        macro.var_comma_patch = []       # Variadic macro comma patch
        i = 0
        while i < len(macro.value):
            if macro.value[i].type == self.t_ID and macro.value[i].value in macro.arglist:
                argnum = macro.arglist.index(macro.value[i].value)
                # Conversion of argument to a string
                if i > 0 and macro.value[i-1].value == '#':
                    macro.value[i] = copy.copy(macro.value[i])
                    macro.value[i].type = self.t_STRING
                    del macro.value[i-1]
                    macro.str_patch.append((argnum,i-1))
                    continue
                # Concatenation
                elif (i > 0 and macro.value[i-1].value == '##'):
                    macro.patch.append(('c',argnum,i-1))
                    del macro.value[i-1]
                    continue
                elif ((i+1) < len(macro.value) and macro.value[i+1].value == '##'):
                    macro.patch.append(('c',argnum,i))
                    i += 1
                    continue
                # Standard expansion
                else:
                    macro.patch.append(('e',argnum,i))
            elif macro.value[i].value == '##':
                if macro.variadic and (i > 0) and (macro.value[i-1].value == ',') and \
                        ((i+1) < len(macro.value)) and (macro.value[i+1].type == self.t_ID) and \
                        (macro.value[i+1].value == macro.vararg):
                    macro.var_comma_patch.append(i-1)
            i += 1
        macro.patch.sort(key=lambda x: x[2],reverse=True)
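
    # Illustrative example: for "#define STR(x) #x" the prescan removes the
    # '#' token, re-types the parameter token as a string token, and records
    # (0, 0) in str_patch.  For "#define ADD(a,b) a+b" it records the normal
    # expansion patches ('e', 0, 0) and ('e', 1, 2), sorted so that the
    # highest position comes first.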

    # ----------------------------------------------------------------------
    # macro_expand_args()
    #
    # Given a Macro and list of arguments (each a token list), this method
    # returns an expanded version of a macro.  The return value is a token sequence
    # representing the replacement macro tokens
    # ----------------------------------------------------------------------

    def macro_expand_args(self,macro,args):
        # Make a copy of the macro token sequence
        rep = [copy.copy(_x) for _x in macro.value]

        # Make string expansion patches.  These do not alter the length of the replacement sequence
        
        str_expansion = {}
        for argnum, i in macro.str_patch:
            if argnum not in str_expansion:
                str_expansion[argnum] = ('"%s"' % "".join([x.value for x in args[argnum]])).replace("\\","\\\\")
            rep[i] = copy.copy(rep[i])
            rep[i].value = str_expansion[argnum]

        # Make the variadic macro comma patch.  If the variadic macro argument
        # is empty, we get rid of the comma that precedes it in the replacement.
        comma_patch = False
        if macro.variadic and not args[-1]:
            for i in macro.var_comma_patch:
                rep[i] = None
                comma_patch = True

        # Make all other patches.   The order of these matters.  It is assumed that the patch list
        # has been sorted in reverse order of patch location since replacements will cause the
        # size of the replacement sequence to expand from the patch point.
        
        expanded = { }
        for ptype, argnum, i in macro.patch:
            # Concatenation.   Argument is left unexpanded
            if ptype == 'c':
                rep[i:i+1] = args[argnum]
            # Normal expansion.  Argument is macro expanded first
            elif ptype == 'e':
                if argnum not in expanded:
                    expanded[argnum] = self.expand_macros(args[argnum])
                rep[i:i+1] = expanded[argnum]

        # Get rid of removed comma if necessary
        if comma_patch:
            rep = [_i for _i in rep if _i]

        return rep
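
    # Illustrative example: for "#define ADD(a,b) a+b" with argument token
    # lists for "1" and "2", this returns the tokens of "1+2"; for
    # "#define STR(x) #x" with the argument tokens of foo, it returns a
    # single string token '"foo"'.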


    # ----------------------------------------------------------------------
    # expand_macros()
    #
    # Given a list of tokens, this function performs macro expansion.
    # The expanded argument is a dictionary that contains macros already
    # expanded.  This is used to prevent infinite recursion.
    # ----------------------------------------------------------------------

    def expand_macros(self,tokens,expanded=None):
        if expanded is None:
            expanded = {}
        i = 0
        while i < len(tokens):
            t = tokens[i]
            if t.type == self.t_ID:
                if t.value in self.macros and t.value not in expanded:
                    # Yes, we found a macro match
                    expanded[t.value] = True
                    
                    m = self.macros[t.value]
                    if not m.arglist:
                        # A simple macro
                        ex = self.expand_macros([copy.copy(_x) for _x in m.value],expanded)
                        for e in ex:
                            e.lineno = t.lineno
                        tokens[i:i+1] = ex
                        i += len(ex)
                    else:
                        # A macro with arguments
                        j = i + 1
                        while j < len(tokens) and tokens[j].type in self.t_WS:
                            j += 1
                        if j < len(tokens) and tokens[j].value == '(':
                            tokcount,args,positions = self.collect_args(tokens[j:])
                            if not m.variadic and len(args) !=  len(m.arglist):
                                self.error(self.source,t.lineno,"Macro %s requires %d arguments" % (t.value,len(m.arglist)))
                                i = j + tokcount
                            elif m.variadic and len(args) < len(m.arglist)-1:
                                if len(m.arglist) > 2:
                                    self.error(self.source,t.lineno,"Macro %s must have at least %d arguments" % (t.value, len(m.arglist)-1))
                                else:
                                    self.error(self.source,t.lineno,"Macro %s must have at least %d argument" % (t.value, len(m.arglist)-1))
                                i = j + tokcount
                            else:
                                if m.variadic:
                                    if len(args) == len(m.arglist)-1:
                                        args.append([])
                                    else:
                                        args[len(m.arglist)-1] = tokens[j+positions[len(m.arglist)-1]:j+tokcount-1]
                                        del args[len(m.arglist):]
                                        
                                # Get macro replacement text
                                rep = self.macro_expand_args(m,args)
                                rep = self.expand_macros(rep,expanded)
                                for r in rep:
                                    r.lineno = t.lineno
                                tokens[i:j+tokcount] = rep
                                i += len(rep)
                        else:
                            # Name matches a macro with arguments but is not
                            # followed by '('; treat it as an ordinary
                            # identifier and move on to the next token.
                            i += 1
                    del expanded[t.value]
                    continue
                elif t.value == '__LINE__':
                    t.type = self.t_INTEGER
                    t.value = self.t_INTEGER_TYPE(t.lineno)
                
            i += 1
        return tokens
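
    # Illustrative example: given a Preprocessor p with p.define("X 2") and
    # p.define("SQR(x) ((x)*(x))"), expanding the tokens of "SQR(X)" yields
    # the tokens of "((2)*(2))".  The 'expanded' dictionary is what keeps a
    # self-referential macro from being expanded inside its own replacement.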

    # ----------------------------------------------------------------------    
    # evalexpr()
    # 
    # Evaluate an expression token sequence as an integral constant
    # expression (used when processing #if and #elif directives).
    # ----------------------------------------------------------------------

    def evalexpr(self,tokens):
        # tokens = tokenize(line)
        # Search for defined macros
        i = 0
        while i < len(tokens):
            if tokens[i].type == self.t_ID and tokens[i].value == 'defined':
                j = i + 1
                needparen = False
                result = "0L"
                while j < len(tokens):
                    if tokens[j].type in self.t_WS:
                        j += 1
                        continue
                    elif tokens[j].type == self.t_ID:
                        if tokens[j].value in self.macros:
                            result = "1L"
                        else:
                            result = "0L"
                        if not needparen: break
                    elif tokens[j].value == '(':
                        needparen = True
                    elif tokens[j].value == ')':
                        break
                    else:
                        self.error(self.source,tokens[i].lineno,"Malformed defined()")
                    j += 1
                tokens[i].type = self.t_INTEGER
                tokens[i].value = self.t_INTEGER_TYPE(result)
                del tokens[i+1:j+1]
            i += 1
        tokens = self.expand_macros(tokens)
        for i,t in enumerate(tokens):
            if t.type == self.t_ID:
                tokens[i] = copy.copy(t)
                tokens[i].type = self.t_INTEGER
                tokens[i].value = self.t_INTEGER_TYPE("0L")
            elif t.type == self.t_INTEGER:
                tokens[i] = copy.copy(t)
                # Strip off any trailing suffixes
                tokens[i].value = str(tokens[i].value)
                while tokens[i].value[-1] not in "0123456789abcdefABCDEF":
                    tokens[i].value = tokens[i].value[:-1]
        
        expr = "".join([str(x.value) for x in tokens])
        expr = expr.replace("&&"," and ")
        expr = expr.replace("||"," or ")
        expr = expr.replace("!"," not ")
        try:
            result = eval(expr)
        except StandardError:
            self.error(self.source,tokens[0].lineno,"Couldn't evaluate expression")
            result = 0
        return result
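
    # Illustrative example: given a Preprocessor p with p.define("FOO 1"),
    # p.evalexpr(p.tokenize("defined(FOO) && 1")) reduces the tokens to a
    # Python expression equivalent to "1 and 1" and returns a true value.
    # Identifiers still undefined after macro expansion evaluate as 0L.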

    # ----------------------------------------------------------------------
    # parsegen()
    #
    # Parse an input string.
    # ----------------------------------------------------------------------
    def parsegen(self,input,source=None):

        # Replace trigraph sequences
        t = trigraph(input)
        lines = self.group_lines(t)

        if not source:
            source = ""
            
        self.define("__FILE__ \"%s\"" % source)

        self.source = source
        chunk = []
        enable = True
        iftrigger = False
        ifstack = []

        for x in lines:
            for i,tok in enumerate(x):
                if tok.type not in self.t_WS: break
            if tok.value == '#':
                # Preprocessor directive

                for tok in x:
                    if tok.type in self.t_WS and '\n' in tok.value:
                        chunk.append(tok)
                
                dirtokens = self.tokenstrip(x[i+1:])
                if dirtokens:
                    name = dirtokens[0].value
                    args = self.tokenstrip(dirtokens[1:])
                else:
                    name = ""
                    args = []
                
                if name == 'define':
                    if enable:
                        for tok in self.expand_macros(chunk):
                            yield tok
                        chunk = []
                        self.define(args)
                elif name == 'include':
                    if enable:
                        for tok in self.expand_macros(chunk):
                            yield tok
                        chunk = []
                        oldfile = self.macros['__FILE__']
                        for tok in self.include(args):
                            yield tok
                        self.macros['__FILE__'] = oldfile
                        self.source = source
                elif name == 'undef':
                    if enable:
                        for tok in self.expand_macros(chunk):
                            yield tok
                        chunk = []
                        self.undef(args)
                elif name == 'ifdef':
                    ifstack.append((enable,iftrigger))
                    if enable:
                        if not args[0].value in self.macros:
                            enable = False
                            iftrigger = False
                        else:
                            iftrigger = True
                elif name == 'ifndef':
                    ifstack.append((enable,iftrigger))
                    if enable:
                        if args[0].value in self.macros:
                            enable = False
                            iftrigger = False
                        else:
                            iftrigger = True
                elif name == 'if':
                    ifstack.append((enable,iftrigger))
                    if enable:
                        result = self.evalexpr(args)
                        if not result:
                            enable = False
                            iftrigger = False
                        else:
                            iftrigger = True
                elif name == 'elif':
                    if ifstack:
                        if ifstack[-1][0]:     # We only pay attention if outer "if" allows this
                            if enable:         # If already true, we flip enable False
                                enable = False
                            elif not iftrigger:   # If False, but not triggered yet, we'll check expression
                                result = self.evalexpr(args)
                                if result:
                                    enable  = True
                                    iftrigger = True
                    else:
                        self.error(self.source,dirtokens[0].lineno,"Misplaced #elif")
                        
                elif name == 'else':
                    if ifstack:
                        if ifstack[-1][0]:
                            if enable:
                                enable = False
                            elif not iftrigger:
                                enable = True
                                iftrigger = True
                    else:
                        self.error(self.source,dirtokens[0].lineno,"Misplaced #else")

                elif name == 'endif':
                    if ifstack:
                        enable,iftrigger = ifstack.pop()
                    else:
                        self.error(self.source,dirtokens[0].lineno,"Misplaced #endif")
                else:
                    # Unknown preprocessor directive
                    pass

            else:
                # Normal text
                if enable:
                    chunk.extend(x)

        for tok in self.expand_macros(chunk):
            yield tok
        chunk = []
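
    # Illustrative example: for the input "#define A 1\nA+A\n", parsegen()
    # consumes the #define directive without emitting it and then yields the
    # expanded tokens of the second line: '1', '+', '1'.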

    # ----------------------------------------------------------------------
    # include()
    #
    # Implementation of file-inclusion
    # ----------------------------------------------------------------------

    def include(self,tokens):
        # Try to extract the filename and then process an include file
        if not tokens:
            return
        if tokens:
            if tokens[0].value != '<' and tokens[0].type != self.t_STRING:
                tokens = self.expand_macros(tokens)

            if tokens[0].value == '<':
                # Include <...>
                i = 1
                while i < len(tokens):
                    if tokens[i].value == '>':
                        break
                    i += 1
                else:
                    print("Malformed #include <...>")
                    return
                filename = "".join([x.value for x in tokens[1:i]])
                path = self.path + [""] + self.temp_path
            elif tokens[0].type == self.t_STRING:
                filename = tokens[0].value[1:-1]
                path = self.temp_path + [""] + self.path
            else:
                print("Malformed #include statement")
                return
        for p in path:
            iname = os.path.join(p,filename)
            try:
                data = open(iname,"r").read()
                dname = os.path.dirname(iname)
                if dname:
                    self.temp_path.insert(0,dname)
                for tok in self.parsegen(data,filename):
                    yield tok
                if dname:
                    del self.temp_path[0]
                break
            except IOError:
                pass
        else:
            print("Couldn't find '%s'" % filename)

    # ----------------------------------------------------------------------
    # define()
    #
    # Define a new macro
    # ----------------------------------------------------------------------

    def define(self,tokens):
        if isinstance(tokens,(str,unicode)):
            tokens = self.tokenize(tokens)

        linetok = tokens
        try:
            name = linetok[0]
            if len(linetok) > 1:
                mtype = linetok[1]
            else:
                mtype = None
            if not mtype:
                m = Macro(name.value,[])
                self.macros[name.value] = m
            elif mtype.type in self.t_WS:
                # A normal macro
                m = Macro(name.value,self.tokenstrip(linetok[2:]))
                self.macros[name.value] = m
            elif mtype.value == '(':
                # A macro with arguments
                tokcount, args, positions = self.collect_args(linetok[1:])
                variadic = False
                for a in args:
                    if variadic:
                        print("No more arguments may follow a variadic argument")
                        break
                    astr = "".join([str(_i.value) for _i in a])
                    if astr == "...":
                        variadic = True
                        a[0].type = self.t_ID
                        a[0].value = '__VA_ARGS__'
                        variadic = True
                        del a[1:]
                        continue
                    elif astr[-3:] == "..." and a[0].type == self.t_ID:
                        variadic = True
                        del a[1:]
                        # If the parameter name itself ends in "...", strip the
                        # trailing dots so only the name is used for macro expansion
                        if a[0].value[-3:] == '...':
                            a[0].value = a[0].value[:-3]
                        continue
                    if len(a) > 1 or a[0].type != self.t_ID:
                        print("Invalid macro argument")
                        break
                else:
                    mvalue = self.tokenstrip(linetok[1+tokcount:])
                    i = 0
                    while i < len(mvalue):
                        if i+1 < len(mvalue):
                            if mvalue[i].type in self.t_WS and mvalue[i+1].value == '##':
                                del mvalue[i]
                                continue
                            elif mvalue[i].value == '##' and mvalue[i+1].type in self.t_WS:
                                del mvalue[i+1]
                        i += 1
                    m = Macro(name.value,mvalue,[x[0].value for x in args],variadic)
                    self.macro_prescan(m)
                    self.macros[name.value] = m
            else:
                print("Bad macro definition")
        except LookupError:
            print("Bad macro definition")

    # ----------------------------------------------------------------------
    # undef()
    #
    # Undefine a macro
    # ----------------------------------------------------------------------

    def undef(self,tokens):
        id = tokens[0].value
        try:
            del self.macros[id]
        except LookupError:
            pass

    # ----------------------------------------------------------------------
    # parse()
    #
    # Parse input text.
    # ----------------------------------------------------------------------
    def parse(self,input,source=None,ignore={}):
        self.ignore = ignore
        self.parser = self.parsegen(input,source)
        
    # ----------------------------------------------------------------------
    # token()
    #
    # Method to return individual tokens
    # ----------------------------------------------------------------------
    def token(self):
        try:
            while True:
                tok = next(self.parser)
                if tok.type not in self.ignore: return tok
        except StopIteration:
            self.parser = None
            return None

if __name__ == '__main__':
    import ply.lex as lex
    lexer = lex.lex()

    # Run a preprocessor
    import sys
    f = open(sys.argv[1])
    input = f.read()

    p = Preprocessor(lexer)
    p.parse(input,sys.argv[1])
    while True:
        tok = p.token()
        if not tok: break
        print(p.source, tok)